From: <js...@us...> - 2007-10-29 12:53:24
|
Revision: 4042 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4042&view=rev Author: jswhit Date: 2007-10-29 05:53:17 -0700 (Mon, 29 Oct 2007) Log Message: ----------- numpification and fixes for rotate_vector (EF) Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-10-28 19:03:49 UTC (rev 4041) +++ trunk/toolkits/basemap/Changelog 2007-10-29 12:53:17 UTC (rev 4042) @@ -1,3 +1,7 @@ +version 0.9.7 (not yet released) + * fix rotate_vector so it works in S. Hem and for non-orthogonal + grids (EF) + * numpification (EF) version 0.9.6 (svn revision 3888) * fix addcyclic function so it handles masked arrays. * labelling of meridians and parallels now works with Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-10-28 19:03:49 UTC (rev 4041) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-10-29 12:53:17 UTC (rev 4042) @@ -12,6 +12,7 @@ from proj import Proj import matplotlib.numerix as NX from matplotlib.numerix import ma +import numpy as npy from numpy import linspace from matplotlib.numerix.mlab import squeeze from matplotlib.cbook import popd, is_scalar @@ -20,7 +21,7 @@ # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) -__version__ = '0.9.6' +__version__ = '0.9.7' # test to see numerix set to use numpy (if not, raise an error) if NX.which[0] != 'numpy': @@ -2175,13 +2176,33 @@ uin = interp(uin,lons,lats,lonsout,latsout,checkbounds=checkbounds,order=order,masked=masked) vin = 
interp(vin,lons,lats,lonsout,latsout,checkbounds=checkbounds,order=order,masked=masked) # rotate from geographic to map coordinates. - delta = 0.1 # increment in latitude used to estimate derivatives. - xn,yn = self(lonsout,NX.where(latsout+delta<90.,latsout+delta,latsout-delta)) - # northangle is the angle between true north and the y axis. - northangle = NX.where(lats+delta<90, NX.arctan2(xn-x, yn-y), - NX.arctan2(x-xn, y-yn)) - uout = uin*NX.cos(northangle) + vin*NX.sin(northangle) - vout = vin*NX.cos(northangle) - uin*NX.sin(northangle) + if ma.isMaskedArray(uin): + mask = ma.getmaskarray(uin) + uin = uin.filled(1) + vin = vin.filled(1) + masked = True # override kwarg with reality + uvc = uin + 1j*vin + uvmag = npy.abs(uvc) + delta = 0.1 # increment in longitude + dlon = delta*uin/uvmag + dlat = delta*(vin/uvmag)*npy.cos(latsout*npy.pi/180.0) + farnorth = latsout+dlat >= 90.0 + somenorth = farnorth.any() + if somenorth: + dlon[farnorth] *= -1.0 + dlat[farnorth] *= -1.0 + lon1 = lonsout + dlon + lat1 = latsout + dlat + xn, yn = self(lon1, lat1) + vecangle = npy.arctan2(yn-y, xn-x) + if somenorth: + vecangle[farnorth] += npy.pi + uvcout = uvmag * npy.exp(1j*vecangle) + uout = uvcout.real + vout = uvcout.imag + if masked: + uout = ma.array(uout, mask=mask) + vout = ma.array(vout, mask=mask) if returnxy: return uout,vout,x,y else: @@ -2210,12 +2231,35 @@ """ x, y = self(lons, lats) # rotate from geographic to map coordinates. - delta = 0.1 # increment in latitude used to estimate derivatives. 
- xn,yn = self(lons,NX.where(lats+delta<90.,lats+delta,lats-delta)) - northangle = NX.where(lats+delta<90, NX.arctan2(xn-x, yn-y), - NX.arctan2(x-xn, y-yn)) - uout = uin*NX.cos(northangle) + vin*NX.sin(northangle) - vout = vin*NX.cos(northangle) - uin*NX.sin(northangle) + if ma.isMaskedArray(uin): + mask = ma.getmaskarray(uin) + masked = True + uin = uin.filled(1) + vin = vin.filled(1) + else: + masked = False + uvc = uin + 1j*vin + uvmag = npy.abs(uvc) + delta = 0.1 # increment in longitude + dlon = delta*uin/uvmag + dlat = delta*(vin/uvmag)*npy.cos(lats*npy.pi/180.0) + farnorth = lats+dlat >= 90.0 + somenorth = farnorth.any() + if somenorth: + dlon[farnorth] *= -1.0 + dlat[farnorth] *= -1.0 + lon1 = lons + dlon + lat1 = lats + dlat + xn, yn = self(lon1, lat1) + vecangle = npy.arctan2(yn-y, xn-x) + if somenorth: + vecangle[farnorth] += npy.pi + uvcout = uvmag * npy.exp(1j*vecangle) + uout = uvcout.real + vout = uvcout.imag + if masked: + uout = ma.array(uout, mask=mask) + vout = ma.array(vout, mask=mask) if returnxy: return uout,vout,x,y else: Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-10-28 19:03:49 UTC (rev 4041) +++ trunk/toolkits/basemap/setup.py 2007-10-29 12:53:17 UTC (rev 4042) @@ -88,7 +88,7 @@ package_data = {'matplotlib.toolkits.basemap':pyproj_datafiles+basemap_datafiles} setup( name = "basemap", - version = "0.9.6", + version = "0.9.7", description = "Plot data on map projections with matplotlib", long_description = """ An add-on toolkit for matplotlib that lets you plot data This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <ef...@us...> - 2007-10-31 05:56:09
|
Revision: 4070 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4070&view=rev Author: efiring Date: 2007-10-30 22:56:06 -0700 (Tue, 30 Oct 2007) Log Message: ----------- Add numpy Nx2 array support to the pyrex proj wrapper Modified Paths: -------------- trunk/toolkits/basemap/setup.py trunk/toolkits/basemap/src/_proj.c trunk/toolkits/basemap/src/_proj.pyx Added Paths: ----------- trunk/toolkits/basemap/src/c_numpy.pxd Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-10-30 20:53:46 UTC (rev 4069) +++ trunk/toolkits/basemap/setup.py 2007-10-31 05:56:06 UTC (rev 4070) @@ -13,6 +13,9 @@ from distutils.core import Extension from distutils.util import convert_path +import numpy + + def dbf_macros(): """Return the macros to define when compiling the dbflib wrapper. @@ -36,7 +39,9 @@ packages = ['matplotlib.toolkits.basemap'] package_dirs = {'':'lib'} -extensions = [Extension("matplotlib.toolkits.basemap._proj",deps+['src/_proj.c'],include_dirs = ['src'],)] +extensions = [Extension("matplotlib.toolkits.basemap._proj", + deps+['src/_proj.c'], + include_dirs = ['src', numpy.get_include()],)] extensions.append(Extension("matplotlib.toolkits.basemap._geod",deps+['src/_geod.c'],include_dirs = ['src'],)) # install shapelib and dbflib. @@ -61,21 +66,21 @@ if 'setuptools' in sys.modules: # Are we running with setuptools? 
# if so, need to specify all the packages in heirarchy - additional_params = {'namespace_packages' : ['matplotlib.toolkits']} + additional_params = {'namespace_packages' : ['matplotlib.toolkits']} packages.extend(['matplotlib', 'matplotlib.toolkits']) setup = setuptools.setup else: additional_params = {} from distutils.core import setup - - + + # Specify all the required mpl data pyproj_datafiles = ['data/epsg', 'data/esri', 'data/esri.extra', 'data/GL27', 'data/nad.lst', 'data/nad27', 'data/nad83', 'data/ntv2_out.dist', 'data/other.extra', 'data/pj_out27.dist', 'data/pj_out83.dist', 'data/proj_def.dat', 'data/README', 'data/td_out.dist', 'data/test27', 'data/test83', 'data/testntv2', 'data/testvarious', 'data/world'] basemap_datafiles = [ 'data/countries_c.txt', 'data/states_c.txt', 'data/rivers_c.txt', 'data/gshhs_c.txt', - 'data/countries_l.txt', + 'data/countries_l.txt', 'data/states_l.txt', 'data/rivers_l.txt', 'data/gshhs_l.txt', @@ -103,8 +108,8 @@ license = "OSI Approved", keywords = ["python","plotting","plots","graphs","charts","GIS","mapping","map projections","maps"], classifiers = ["Development Status :: 4 - Beta", - "Intended Audience :: Science/Research", - "License :: OSI Approved", + "Intended Audience :: Science/Research", + "License :: OSI Approved", "Topic :: Scientific/Engineering :: Visualization", "Topic :: Software Development :: Libraries :: Python Modules", "Operating System :: OS Independent"], Modified: trunk/toolkits/basemap/src/_proj.c =================================================================== --- trunk/toolkits/basemap/src/_proj.c 2007-10-30 20:53:46 UTC (rev 4069) +++ trunk/toolkits/basemap/src/_proj.c 2007-10-31 05:56:06 UTC (rev 4070) @@ -1,4 +1,4 @@ -/* Generated by Cython 0.9.6.7 on Sun Oct 14 08:16:26 2007 */ +/* Generated by Cython 0.9.6.7 on Sun Oct 28 13:19:08 2007 */ #define PY_SSIZE_T_CLEAN #include "Python.h" @@ -25,6 +25,7 @@ #include "math.h" #include "geodesic.h" #include "proj_api.h" +#include 
"numpy/arrayobject.h" #ifdef __GNUC__ @@ -84,8 +85,17 @@ static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ +static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/ + static void __Pyx_AddTraceback(char *funcname); /*proto*/ +/* Declarations from c_numpy */ + +static PyTypeObject *__pyx_ptype_7c_numpy_dtype = 0; +static PyTypeObject *__pyx_ptype_7c_numpy_ndarray = 0; +static PyTypeObject *__pyx_ptype_7c_numpy_flatiter = 0; +static PyTypeObject *__pyx_ptype_7c_numpy_broadcast = 0; + /* Declarations from _proj */ @@ -99,16 +109,22 @@ }; static PyTypeObject *__pyx_ptype_5_proj_Proj = 0; -static PyObject *__pyx_k3; static PyObject *__pyx_k4; static PyObject *__pyx_k5; static PyObject *__pyx_k6; +static PyObject *__pyx_k7; +static PyObject *__pyx_k8; +static PyObject *__pyx_k9; +static PyObject *__pyx_k10; +static PyObject *__pyx_k11; /* Implementation of _proj */ -static char (__pyx_k2[]) = "1.8.3"; +static char (__pyx_k3[]) = "1.8.3"; +static PyObject *__pyx_n_c_numpy; +static PyObject *__pyx_n_numpy; static PyObject *__pyx_n_math; static PyObject *__pyx_n__dg2rad; static PyObject *__pyx_n__rad2dg; @@ -119,6 +135,8 @@ static PyObject *__pyx_n___dealloc__; static PyObject *__pyx_n___reduce__; static PyObject *__pyx_n__fwd; +static PyObject *__pyx_n__fwdn; +static PyObject *__pyx_n__invn; static PyObject *__pyx_n__inv; static PyObject *__pyx_n_is_latlong; static PyObject *__pyx_n_is_geocent; @@ -126,7 +144,7 @@ static PyObject *__pyx_n_radians; static PyObject *__pyx_n_degrees; -static PyObject *__pyx_k2p; +static PyObject *__pyx_k3p; static PyObject *__pyx_f_py_5_proj_set_datapath(PyObject *__pyx_self, PyObject *__pyx_v_datapath); /*proto*/ static PyObject *__pyx_f_py_5_proj_set_datapath(PyObject *__pyx_self, PyObject *__pyx_v_datapath) { @@ -134,20 +152,20 @@ PyObject *__pyx_r; Py_INCREF(__pyx_v_datapath); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":7 + /* 
"/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":11 * def set_datapath(datapath): * cdef char *searchpath * searchpath = PyString_AsString(datapath) # <<<<<<<<<<<<<< * pj_set_searchpath(1, &searchpath) - * + * */ __pyx_v_searchpath = PyString_AsString(__pyx_v_datapath); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":8 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":12 * cdef char *searchpath * searchpath = PyString_AsString(datapath) * pj_set_searchpath(1, &searchpath) # <<<<<<<<<<<<<< - * + * * cdef class Proj: */ pj_set_searchpath(1,(&__pyx_v_searchpath)); @@ -162,17 +180,17 @@ static PyObject *__pyx_n_join; static PyObject *__pyx_n_RuntimeError; -static PyObject *__pyx_k7p; -static PyObject *__pyx_k8p; -static PyObject *__pyx_k9p; -static PyObject *__pyx_k10p; +static PyObject *__pyx_k12p; +static PyObject *__pyx_k13p; +static PyObject *__pyx_k14p; +static PyObject *__pyx_k15p; static PyObject *__pyx_builtin_RuntimeError; -static char (__pyx_k7[]) = "+"; -static char (__pyx_k8[]) = "="; -static char (__pyx_k9[]) = " "; -static char (__pyx_k10[]) = ""; +static char (__pyx_k12[]) = "+"; +static char (__pyx_k13[]) = "="; +static char (__pyx_k14[]) = " "; +static char (__pyx_k15[]) = ""; static int __pyx_f_py_5_proj_4Proj___new__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ static int __pyx_f_py_5_proj_4Proj___new__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { @@ -196,7 +214,7 @@ __pyx_v_key = Py_None; Py_INCREF(Py_None); __pyx_v_value = Py_None; Py_INCREF(Py_None); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":18 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":22 * * def __new__(self, projparams): * self.projparams = projparams # <<<<<<<<<<<<<< @@ -207,30 +225,30 @@ Py_DECREF(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projparams); ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projparams = __pyx_v_projparams; - /* 
"/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":20 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":24 * self.projparams = projparams * # setup proj initialization string. * pjargs = [] # <<<<<<<<<<<<<< * for key,value in projparams.iteritems(): * pjargs.append('+'+key+"="+str(value)+' ') */ - __pyx_1 = PyList_New(0); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; goto __pyx_L1;} + __pyx_1 = PyList_New(0); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; goto __pyx_L1;} Py_DECREF(__pyx_v_pjargs); __pyx_v_pjargs = __pyx_1; __pyx_1 = 0; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":21 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":25 * # setup proj initialization string. * pjargs = [] * for key,value in projparams.iteritems(): # <<<<<<<<<<<<<< * pjargs.append('+'+key+"="+str(value)+' ') * self.srs = ''.join(pjargs) */ - __pyx_1 = PyObject_GetAttr(__pyx_v_projparams, __pyx_n_iteritems); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} - __pyx_3 = PyObject_CallObject(__pyx_1, 0); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} + __pyx_1 = PyObject_GetAttr(__pyx_v_projparams, __pyx_n_iteritems); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} + __pyx_3 = PyObject_CallObject(__pyx_1, 0); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} Py_DECREF(__pyx_1); __pyx_1 = 0; if (PyList_CheckExact(__pyx_3)) { __pyx_2 = 0; __pyx_1 = __pyx_3; Py_INCREF(__pyx_1); } - else { __pyx_1 = PyObject_GetIter(__pyx_3); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} } + else { __pyx_1 = PyObject_GetIter(__pyx_3); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} } Py_DECREF(__pyx_3); __pyx_3 = 0; for (;;) { if (PyList_CheckExact(__pyx_1)) { 
if (__pyx_2 >= PyList_GET_SIZE(__pyx_1)) break; __pyx_3 = PyList_GET_ITEM(__pyx_1, __pyx_2++); Py_INCREF(__pyx_3); } @@ -254,70 +272,70 @@ Py_DECREF(__pyx_3); __pyx_3 = 0; } else { - __pyx_4 = PyObject_GetIter(__pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} + __pyx_4 = PyObject_GetIter(__pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_5 = __Pyx_UnpackItem(__pyx_4); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} + __pyx_5 = __Pyx_UnpackItem(__pyx_4); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} Py_DECREF(__pyx_v_key); __pyx_v_key = __pyx_5; __pyx_5 = 0; - __pyx_5 = __Pyx_UnpackItem(__pyx_4); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} + __pyx_5 = __Pyx_UnpackItem(__pyx_4); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} Py_DECREF(__pyx_v_value); __pyx_v_value = __pyx_5; __pyx_5 = 0; - if (__Pyx_EndUnpack(__pyx_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; goto __pyx_L1;} + if (__Pyx_EndUnpack(__pyx_4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; } - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":22 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":26 * pjargs = [] * for key,value in projparams.iteritems(): * pjargs.append('+'+key+"="+str(value)+' ') # <<<<<<<<<<<<<< * self.srs = ''.join(pjargs) * self.pjinitstring = PyString_AsString(self.srs) */ - __pyx_5 = PyObject_GetAttr(__pyx_v_pjargs, __pyx_n_append); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} - __pyx_3 = PyNumber_Add(__pyx_k7p, __pyx_v_key); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} - __pyx_4 = 
PyNumber_Add(__pyx_3, __pyx_k8p); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_5 = PyObject_GetAttr(__pyx_v_pjargs, __pyx_n_append); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} + __pyx_3 = PyNumber_Add(__pyx_k12p, __pyx_v_key); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} + __pyx_4 = PyNumber_Add(__pyx_3, __pyx_k13p); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_3 = PyTuple_New(1); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_3 = PyTuple_New(1); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_INCREF(__pyx_v_value); PyTuple_SET_ITEM(__pyx_3, 0, __pyx_v_value); - __pyx_6 = PyObject_CallObject(((PyObject*)&PyString_Type), __pyx_3); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_6 = PyObject_CallObject(((PyObject*)&PyString_Type), __pyx_3); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_3 = PyNumber_Add(__pyx_4, __pyx_6); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_3 = PyNumber_Add(__pyx_4, __pyx_6); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; Py_DECREF(__pyx_6); __pyx_6 = 0; - __pyx_4 = PyNumber_Add(__pyx_3, __pyx_k9p); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_4 = PyNumber_Add(__pyx_3, __pyx_k14p); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_6 = PyTuple_New(1); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + 
__pyx_6 = PyTuple_New(1); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} PyTuple_SET_ITEM(__pyx_6, 0, __pyx_4); __pyx_4 = 0; - __pyx_3 = PyObject_CallObject(__pyx_5, __pyx_6); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; goto __pyx_L1;} + __pyx_3 = PyObject_CallObject(__pyx_5, __pyx_6); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; goto __pyx_L1;} Py_DECREF(__pyx_5); __pyx_5 = 0; Py_DECREF(__pyx_6); __pyx_6 = 0; Py_DECREF(__pyx_3); __pyx_3 = 0; } Py_DECREF(__pyx_1); __pyx_1 = 0; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":23 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":27 * for key,value in projparams.iteritems(): * pjargs.append('+'+key+"="+str(value)+' ') * self.srs = ''.join(pjargs) # <<<<<<<<<<<<<< * self.pjinitstring = PyString_AsString(self.srs) * # initialize projection */ - __pyx_4 = PyObject_GetAttr(__pyx_k10p, __pyx_n_join); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; goto __pyx_L1;} - __pyx_5 = PyTuple_New(1); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; goto __pyx_L1;} + __pyx_4 = PyObject_GetAttr(__pyx_k15p, __pyx_n_join); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; goto __pyx_L1;} + __pyx_5 = PyTuple_New(1); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; goto __pyx_L1;} Py_INCREF(__pyx_v_pjargs); PyTuple_SET_ITEM(__pyx_5, 0, __pyx_v_pjargs); - __pyx_6 = PyObject_CallObject(__pyx_4, __pyx_5); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; goto __pyx_L1;} + __pyx_6 = PyObject_CallObject(__pyx_4, __pyx_5); if (unlikely(!__pyx_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 27; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; Py_DECREF(__pyx_5); __pyx_5 = 0; Py_DECREF(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->srs); ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->srs = __pyx_6; 
__pyx_6 = 0; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":24 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":28 * pjargs.append('+'+key+"="+str(value)+' ') * self.srs = ''.join(pjargs) * self.pjinitstring = PyString_AsString(self.srs) # <<<<<<<<<<<<<< @@ -326,7 +344,7 @@ */ ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->pjinitstring = PyString_AsString(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->srs); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":26 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":30 * self.pjinitstring = PyString_AsString(self.srs) * # initialize projection * self.projpj = pj_init_plus(self.pjinitstring) # <<<<<<<<<<<<<< @@ -335,7 +353,7 @@ */ ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projpj = pj_init_plus(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->pjinitstring); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":27 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":31 * # initialize projection * self.projpj = pj_init_plus(self.pjinitstring) * if pj_errno != 0: # <<<<<<<<<<<<<< @@ -345,34 +363,34 @@ __pyx_7 = (pj_errno != 0); if (__pyx_7) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":28 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":32 * self.projpj = pj_init_plus(self.pjinitstring) * if pj_errno != 0: * raise RuntimeError(pj_strerrno(pj_errno)) # <<<<<<<<<<<<<< * self.proj_version = PJ_VERSION/100. 
* */ - __pyx_3 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; goto __pyx_L1;} - __pyx_1 = PyTuple_New(1); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; goto __pyx_L1;} + __pyx_3 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; goto __pyx_L1;} + __pyx_1 = PyTuple_New(1); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; goto __pyx_L1;} PyTuple_SET_ITEM(__pyx_1, 0, __pyx_3); __pyx_3 = 0; - __pyx_4 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_1); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; goto __pyx_L1;} + __pyx_4 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_1); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; goto __pyx_L1;} Py_DECREF(__pyx_1); __pyx_1 = 0; __Pyx_Raise(__pyx_4, 0, 0); Py_DECREF(__pyx_4); __pyx_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; goto __pyx_L1;} + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; goto __pyx_L1;} goto __pyx_L4; } __pyx_L4:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":29 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":33 * if pj_errno != 0: * raise RuntimeError(pj_strerrno(pj_errno)) * self.proj_version = PJ_VERSION/100. 
# <<<<<<<<<<<<<< * * def __dealloc__(self): */ - __pyx_5 = PyFloat_FromDouble((PJ_VERSION / 100.)); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; goto __pyx_L1;} + __pyx_5 = PyFloat_FromDouble((PJ_VERSION / 100.)); if (unlikely(!__pyx_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; goto __pyx_L1;} Py_DECREF(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->proj_version); ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->proj_version = __pyx_5; __pyx_5 = 0; @@ -401,7 +419,7 @@ static void __pyx_f_py_5_proj_4Proj___dealloc__(PyObject *__pyx_v_self) { Py_INCREF(__pyx_v_self); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":33 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":37 * def __dealloc__(self): * """destroy projection definition""" * pj_free(self.projpj) # <<<<<<<<<<<<<< @@ -424,18 +442,18 @@ PyObject *__pyx_3 = 0; Py_INCREF(__pyx_v_self); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":37 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":41 * def __reduce__(self): * """special method that allows pyproj.Proj instance to be pickled""" * return (self.__class__,(self.projparams,)) # <<<<<<<<<<<<<< * * def _fwd(self, object lons, object lats, radians=False, errcheck=False): */ - __pyx_1 = PyObject_GetAttr(__pyx_v_self, __pyx_n___class__); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; goto __pyx_L1;} - __pyx_2 = PyTuple_New(1); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; goto __pyx_L1;} + __pyx_1 = PyObject_GetAttr(__pyx_v_self, __pyx_n___class__); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; goto __pyx_L1;} + __pyx_2 = PyTuple_New(1); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; goto __pyx_L1;} Py_INCREF(((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projparams); PyTuple_SET_ITEM(__pyx_2, 0, ((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projparams); - __pyx_3 = 
PyTuple_New(2); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; goto __pyx_L1;} + __pyx_3 = PyTuple_New(2); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; goto __pyx_L1;} PyTuple_SET_ITEM(__pyx_3, 0, __pyx_1); PyTuple_SET_ITEM(__pyx_3, 1, __pyx_2); __pyx_1 = 0; @@ -457,12 +475,12 @@ return __pyx_r; } -static PyObject *__pyx_k11p; +static PyObject *__pyx_k16p; -static char (__pyx_k11[]) = "Buffer lengths not the same"; +static char (__pyx_k16[]) = "Buffer lengths not the same"; static PyObject *__pyx_f_py_5_proj_4Proj__fwd(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static char __pyx_doc_5_proj_4Proj__fwd[] = "\n forward transformation - lons,lats to x,y (done in place).\n if radians=True, lons/lats are radians instead of degrees.\n if errcheck=True, an exception is raised if the forward transformation is invalid.\n if errcheck=False and the forward transformation is invalid, no exception is\n raised and 1.e30 is returned.\n "; +static char __pyx_doc_5_proj_4Proj__fwd[] = "\n forward transformation - lons,lats to x,y (done in place).\n if radians=True, lons/lats are radians instead of degrees.\n if errcheck=True, an exception is raised if the forward transformation is invalid.\n if errcheck=False and the forward transformation is invalid, no exception is\n raised and 1.e30 is returned.\n "; static PyObject *__pyx_f_py_5_proj_4Proj__fwd(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { PyObject *__pyx_v_lons = 0; PyObject *__pyx_v_lats = 0; @@ -486,8 +504,8 @@ Py_ssize_t __pyx_5; double __pyx_6; static char *__pyx_argnames[] = {"lons","lats","radians","errcheck",0}; - __pyx_v_radians = __pyx_k3; - __pyx_v_errcheck = __pyx_k4; + __pyx_v_radians = __pyx_k4; + __pyx_v_errcheck = __pyx_k5; if (unlikely(!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "OO|OO", __pyx_argnames, &__pyx_v_lons, &__pyx_v_lats, &__pyx_v_radians, &__pyx_v_errcheck))) return 0; 
Py_INCREF(__pyx_v_self); Py_INCREF(__pyx_v_lons); @@ -495,7 +513,7 @@ Py_INCREF(__pyx_v_radians); Py_INCREF(__pyx_v_errcheck); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":53 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":57 * cdef void *londata, *latdata * # if buffer api is supported, get pointer to data buffers. * if PyObject_AsWriteBuffer(lons, &londata, &buflenx) <> 0: # <<<<<<<<<<<<<< @@ -505,7 +523,7 @@ __pyx_1 = (PyObject_AsWriteBuffer(__pyx_v_lons,(&__pyx_v_londata),(&__pyx_v_buflenx)) != 0); if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":54 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":58 * # if buffer api is supported, get pointer to data buffers. * if PyObject_AsWriteBuffer(lons, &londata, &buflenx) <> 0: * raise RuntimeError # <<<<<<<<<<<<<< @@ -513,12 +531,12 @@ * raise RuntimeError */ __Pyx_Raise(__pyx_builtin_RuntimeError, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; goto __pyx_L1;} + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; goto __pyx_L1;} goto __pyx_L2; } __pyx_L2:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":55 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":59 * if PyObject_AsWriteBuffer(lons, &londata, &buflenx) <> 0: * raise RuntimeError * if PyObject_AsWriteBuffer(lats, &latdata, &bufleny) <> 0: # <<<<<<<<<<<<<< @@ -528,7 +546,7 @@ __pyx_1 = (PyObject_AsWriteBuffer(__pyx_v_lats,(&__pyx_v_latdata),(&__pyx_v_bufleny)) != 0); if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":56 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":60 * raise RuntimeError * if PyObject_AsWriteBuffer(lats, &latdata, &bufleny) <> 0: * raise RuntimeError # <<<<<<<<<<<<<< @@ -536,12 +554,12 @@ * if buflenx != bufleny: */ __Pyx_Raise(__pyx_builtin_RuntimeError, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; goto __pyx_L1;} + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 
60; goto __pyx_L1;} goto __pyx_L3; } __pyx_L3:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":58 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":62 * raise RuntimeError * # process data in buffer * if buflenx != bufleny: # <<<<<<<<<<<<<< @@ -551,42 +569,42 @@ __pyx_1 = (__pyx_v_buflenx != __pyx_v_bufleny); if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":59 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":63 * # process data in buffer * if buflenx != bufleny: * raise RuntimeError("Buffer lengths not the same") # <<<<<<<<<<<<<< * ndim = buflenx/_doublesize * lonsdata = <double *>londata */ - __pyx_2 = PyTuple_New(1); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 59; goto __pyx_L1;} - Py_INCREF(__pyx_k11p); - PyTuple_SET_ITEM(__pyx_2, 0, __pyx_k11p); - __pyx_3 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_2); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 59; goto __pyx_L1;} + __pyx_2 = PyTuple_New(1); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 63; goto __pyx_L1;} + Py_INCREF(__pyx_k16p); + PyTuple_SET_ITEM(__pyx_2, 0, __pyx_k16p); + __pyx_3 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_2); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 63; goto __pyx_L1;} Py_DECREF(__pyx_2); __pyx_2 = 0; __Pyx_Raise(__pyx_3, 0, 0); Py_DECREF(__pyx_3); __pyx_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 59; goto __pyx_L1;} + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 63; goto __pyx_L1;} goto __pyx_L4; } __pyx_L4:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":60 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":64 * if buflenx != bufleny: * raise RuntimeError("Buffer lengths not the same") * ndim = buflenx/_doublesize # <<<<<<<<<<<<<< * lonsdata = <double *>londata * latsdata = <double *>latdata */ - __pyx_2 = PyInt_FromSsize_t(__pyx_v_buflenx); if 
(unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; goto __pyx_L1;} - __pyx_3 = __Pyx_GetName(__pyx_m, __pyx_n__doublesize); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; goto __pyx_L1;} - __pyx_4 = PyNumber_Divide(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; goto __pyx_L1;} + __pyx_2 = PyInt_FromSsize_t(__pyx_v_buflenx); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; goto __pyx_L1;} + __pyx_3 = __Pyx_GetName(__pyx_m, __pyx_n__doublesize); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; goto __pyx_L1;} + __pyx_4 = PyNumber_Divide(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; goto __pyx_L1;} Py_DECREF(__pyx_2); __pyx_2 = 0; Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_5 = __pyx_PyIndex_AsSsize_t(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; goto __pyx_L1;} + __pyx_5 = __pyx_PyIndex_AsSsize_t(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; __pyx_v_ndim = __pyx_5; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":61 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":65 * raise RuntimeError("Buffer lengths not the same") * ndim = buflenx/_doublesize * lonsdata = <double *>londata # <<<<<<<<<<<<<< @@ -595,7 +613,7 @@ */ __pyx_v_lonsdata = ((double (*))__pyx_v_londata); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":62 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":66 * ndim = buflenx/_doublesize * lonsdata = <double *>londata * latsdata = <double *>latdata # <<<<<<<<<<<<<< @@ -604,7 +622,7 @@ */ __pyx_v_latsdata = ((double (*))__pyx_v_latdata); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":63 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":67 * lonsdata = 
<double *>londata * latsdata = <double *>latdata * for i from 0 <= i < ndim: # <<<<<<<<<<<<<< @@ -613,17 +631,17 @@ */ for (__pyx_v_i = 0; __pyx_v_i < __pyx_v_ndim; __pyx_v_i++) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":64 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":68 * latsdata = <double *>latdata * for i from 0 <= i < ndim: * if radians: # <<<<<<<<<<<<<< * projlonlatin.u = lonsdata[i] * projlonlatin.v = latsdata[i] */ - __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_v_radians); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; goto __pyx_L1;} + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_v_radians); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; goto __pyx_L1;} if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":65 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":69 * for i from 0 <= i < ndim: * if radians: * projlonlatin.u = lonsdata[i] # <<<<<<<<<<<<<< @@ -632,7 +650,7 @@ */ __pyx_v_projlonlatin.u = (__pyx_v_lonsdata[__pyx_v_i]); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":66 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":70 * if radians: * projlonlatin.u = lonsdata[i] * projlonlatin.v = latsdata[i] # <<<<<<<<<<<<<< @@ -644,41 +662,41 @@ } /*else*/ { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":68 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":72 * projlonlatin.v = latsdata[i] * else: * projlonlatin.u = _dg2rad*lonsdata[i] # <<<<<<<<<<<<<< * projlonlatin.v = _dg2rad*latsdata[i] * projxyout = pj_fwd(projlonlatin,self.projpj) */ - __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; goto __pyx_L1;} - __pyx_3 = PyFloat_FromDouble((__pyx_v_lonsdata[__pyx_v_i])); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; goto __pyx_L1;} - __pyx_4 = PyNumber_Multiply(__pyx_2, 
__pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; goto __pyx_L1;} + __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} + __pyx_3 = PyFloat_FromDouble((__pyx_v_lonsdata[__pyx_v_i])); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} + __pyx_4 = PyNumber_Multiply(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} Py_DECREF(__pyx_2); __pyx_2 = 0; Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_6 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; goto __pyx_L1;} + __pyx_6 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; __pyx_v_projlonlatin.u = __pyx_6; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":69 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":73 * else: * projlonlatin.u = _dg2rad*lonsdata[i] * projlonlatin.v = _dg2rad*latsdata[i] # <<<<<<<<<<<<<< * projxyout = pj_fwd(projlonlatin,self.projpj) * if errcheck and pj_errno != 0: */ - __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 69; goto __pyx_L1;} - __pyx_3 = PyFloat_FromDouble((__pyx_v_latsdata[__pyx_v_i])); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 69; goto __pyx_L1;} - __pyx_4 = PyNumber_Multiply(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 69; goto __pyx_L1;} + __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 73; goto __pyx_L1;} + __pyx_3 = PyFloat_FromDouble((__pyx_v_latsdata[__pyx_v_i])); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 73; goto __pyx_L1;} + __pyx_4 = 
PyNumber_Multiply(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 73; goto __pyx_L1;} Py_DECREF(__pyx_2); __pyx_2 = 0; Py_DECREF(__pyx_3); __pyx_3 = 0; - __pyx_6 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 69; goto __pyx_L1;} + __pyx_6 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 73; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; __pyx_v_projlonlatin.v = __pyx_6; } __pyx_L7:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":70 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":74 * projlonlatin.u = _dg2rad*lonsdata[i] * projlonlatin.v = _dg2rad*latsdata[i] * projxyout = pj_fwd(projlonlatin,self.projpj) # <<<<<<<<<<<<<< @@ -687,7 +705,7 @@ */ __pyx_v_projxyout = pj_fwd(__pyx_v_projlonlatin,((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projpj); - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":71 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":75 * projlonlatin.v = _dg2rad*latsdata[i] * projxyout = pj_fwd(projlonlatin,self.projpj) * if errcheck and pj_errno != 0: # <<<<<<<<<<<<<< @@ -696,36 +714,36 @@ */ __pyx_2 = __pyx_v_errcheck; Py_INCREF(__pyx_2); - __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 71; goto __pyx_L1;} + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 75; goto __pyx_L1;} if (__pyx_1) { Py_DECREF(__pyx_2); __pyx_2 = 0; - __pyx_2 = __Pyx_PyBool_FromLong((pj_errno != 0)); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 71; goto __pyx_L1;} + __pyx_2 = __Pyx_PyBool_FromLong((pj_errno != 0)); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 75; goto __pyx_L1;} } - __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; 
__pyx_lineno = 71; goto __pyx_L1;} + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 75; goto __pyx_L1;} Py_DECREF(__pyx_2); __pyx_2 = 0; if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":72 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":76 * projxyout = pj_fwd(projlonlatin,self.projpj) * if errcheck and pj_errno != 0: * raise RuntimeError(pj_strerrno(pj_errno)) # <<<<<<<<<<<<<< * # since HUGE_VAL can be 'inf', * # change it to a real (but very large) number. */ - __pyx_3 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} - __pyx_4 = PyTuple_New(1); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} + __pyx_3 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 76; goto __pyx_L1;} + __pyx_4 = PyTuple_New(1); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 76; goto __pyx_L1;} PyTuple_SET_ITEM(__pyx_4, 0, __pyx_3); __pyx_3 = 0; - __pyx_2 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_4); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} + __pyx_2 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_4); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 76; goto __pyx_L1;} Py_DECREF(__pyx_4); __pyx_4 = 0; __Pyx_Raise(__pyx_2, 0, 0); Py_DECREF(__pyx_2); __pyx_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; goto __pyx_L1;} + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 76; goto __pyx_L1;} goto __pyx_L8; } __pyx_L8:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":75 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":79 * # since HUGE_VAL can be 'inf', * # change it to a real (but very large) number. 
* if projxyout.u == HUGE_VAL: # <<<<<<<<<<<<<< @@ -735,7 +753,7 @@ __pyx_1 = (__pyx_v_projxyout.u == HUGE_VAL); if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":76 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":80 * # change it to a real (but very large) number. * if projxyout.u == HUGE_VAL: * lonsdata[i] = 1.e30 # <<<<<<<<<<<<<< @@ -747,7 +765,7 @@ } /*else*/ { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":78 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":82 * lonsdata[i] = 1.e30 * else: * lonsdata[i] = projxyout.u # <<<<<<<<<<<<<< @@ -758,21 +776,21 @@ } __pyx_L9:; - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":79 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":83 * else: * lonsdata[i] = projxyout.u * if projxyout.v == HUGE_VAL: # <<<<<<<<<<<<<< * latsdata[i] = 1.e30 - * else: + * else: */ __pyx_1 = (__pyx_v_projxyout.v == HUGE_VAL); if (__pyx_1) { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":80 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":84 * lonsdata[i] = projxyout.u * if projxyout.v == HUGE_VAL: * latsdata[i] = 1.e30 # <<<<<<<<<<<<<< - * else: + * else: * latsdata[i] = projxyout.v */ (__pyx_v_latsdata[__pyx_v_i]) = 1.e30; @@ -780,12 +798,12 @@ } /*else*/ { - /* "/Users/jsw/python/matplotlib/toolkits/basemap/src/_proj.pyx":82 + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":86 * latsdata[i] = 1.e30 - * else: + * else: * latsdata[i] = projxyout.v # <<<<<<<<<<<<<< * - * def _inv(self, object x, object y, radians=False, errcheck=False): + * def _fwdn(self, c_numpy.ndarray lonlat, radians=False, errcheck=False): */ (__pyx_v_latsdata[__pyx_v_i]) = __pyx_v_projxyout.v; } @@ -809,12 +827,485 @@ return __pyx_r; } -static PyObject *__pyx_k12p; +static PyObject *__pyx_f_py_5_proj_4Proj__fwdn(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char 
__pyx_doc_5_proj_4Proj__fwdn[] = "\n forward transformation - lons,lats to x,y (done in place).\n Uses ndarray of shape ...,2.\n if radians=True, lons/lats are radians instead of degrees.\n if errcheck=True, an exception is raised if the forward\n transformation is invalid.\n if errcheck=False and the forward transformation is\n invalid, no exception is\n raised and 1.e30 is returned.\n "; +static PyObject *__pyx_f_py_5_proj_4Proj__fwdn(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_lonlat = 0; + PyObject *__pyx_v_radians = 0; + PyObject *__pyx_v_errcheck = 0; + projUV __pyx_v_projxyout; + projUV __pyx_v_projlonlatin; + projUV (*__pyx_v_llptr); + Py_ssize_t __pyx_v_npts; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_r; + int __pyx_1; + PyObject *__pyx_2 = 0; + PyObject *__pyx_3 = 0; + PyObject *__pyx_4 = 0; + double __pyx_5; + static char *__pyx_argnames[] = {"lonlat","radians","errcheck",0}; + __pyx_v_radians = __pyx_k6; + __pyx_v_errcheck = __pyx_k7; + if (unlikely(!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "O|OO", __pyx_argnames, &__pyx_v_lonlat, &__pyx_v_radians, &__pyx_v_errcheck))) return 0; + Py_INCREF(__pyx_v_self); + Py_INCREF(__pyx_v_lonlat); + Py_INCREF(__pyx_v_radians); + Py_INCREF(__pyx_v_errcheck); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_lonlat), __pyx_ptype_7c_numpy_ndarray, 1, "lonlat"))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 88; goto __pyx_L1;} -static char (__pyx_k12[]) = "Buffer lengths not the same"; + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":102 + * cdef projUV *llptr + * cdef Py_ssize_t npts, i + * npts = c_numpy.PyArray_SIZE(lonlat)//2 # <<<<<<<<<<<<<< + * llptr = <projUV *>lonlat.data + * for i from 0 <= i < npts: + */ + __pyx_v_npts = (PyArray_SIZE(__pyx_v_lonlat) / 2); + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":103 + * cdef Py_ssize_t npts, i + * npts = c_numpy.PyArray_SIZE(lonlat)//2 + * llptr = <projUV *>lonlat.data # 
<<<<<<<<<<<<<< + * for i from 0 <= i < npts: + * if radians: + */ + __pyx_v_llptr = ((projUV (*))__pyx_v_lonlat->data); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":104 + * npts = c_numpy.PyArray_SIZE(lonlat)//2 + * llptr = <projUV *>lonlat.data + * for i from 0 <= i < npts: # <<<<<<<<<<<<<< + * if radians: + * projlonlatin = llptr[i] + */ + for (__pyx_v_i = 0; __pyx_v_i < __pyx_v_npts; __pyx_v_i++) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":105 + * llptr = <projUV *>lonlat.data + * for i from 0 <= i < npts: + * if radians: # <<<<<<<<<<<<<< + * projlonlatin = llptr[i] + * else: + */ + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_v_radians); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 105; goto __pyx_L1;} + if (__pyx_1) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":106 + * for i from 0 <= i < npts: + * if radians: + * projlonlatin = llptr[i] # <<<<<<<<<<<<<< + * else: + * projlonlatin.u = _dg2rad*llptr[i].u + */ + __pyx_v_projlonlatin = (__pyx_v_llptr[__pyx_v_i]); + goto __pyx_L4; + } + /*else*/ { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":108 + * projlonlatin = llptr[i] + * else: + * projlonlatin.u = _dg2rad*llptr[i].u # <<<<<<<<<<<<<< + * projlonlatin.v = _dg2rad*llptr[i].v + * projxyout = pj_fwd(projlonlatin,self.projpj) + */ + __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 108; goto __pyx_L1;} + __pyx_3 = PyFloat_FromDouble((__pyx_v_llptr[__pyx_v_i]).u); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 108; goto __pyx_L1;} + __pyx_4 = PyNumber_Multiply(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 108; goto __pyx_L1;} + Py_DECREF(__pyx_2); __pyx_2 = 0; + Py_DECREF(__pyx_3); __pyx_3 = 0; + __pyx_5 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 108; goto __pyx_L1;} + 
Py_DECREF(__pyx_4); __pyx_4 = 0; + __pyx_v_projlonlatin.u = __pyx_5; + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":109 + * else: + * projlonlatin.u = _dg2rad*llptr[i].u + * projlonlatin.v = _dg2rad*llptr[i].v # <<<<<<<<<<<<<< + * projxyout = pj_fwd(projlonlatin,self.projpj) + * + */ + __pyx_2 = __Pyx_GetName(__pyx_m, __pyx_n__dg2rad); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 109; goto __pyx_L1;} + __pyx_3 = PyFloat_FromDouble((__pyx_v_llptr[__pyx_v_i]).v); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 109; goto __pyx_L1;} + __pyx_4 = PyNumber_Multiply(__pyx_2, __pyx_3); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 109; goto __pyx_L1;} + Py_DECREF(__pyx_2); __pyx_2 = 0; + Py_DECREF(__pyx_3); __pyx_3 = 0; + __pyx_5 = PyFloat_AsDouble(__pyx_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 109; goto __pyx_L1;} + Py_DECREF(__pyx_4); __pyx_4 = 0; + __pyx_v_projlonlatin.v = __pyx_5; + } + __pyx_L4:; + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":110 + * projlonlatin.u = _dg2rad*llptr[i].u + * projlonlatin.v = _dg2rad*llptr[i].v + * projxyout = pj_fwd(projlonlatin,self.projpj) # <<<<<<<<<<<<<< + * + * if errcheck and pj_errno != 0: + */ + __pyx_v_projxyout = pj_fwd(__pyx_v_projlonlatin,((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projpj); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":112 + * projxyout = pj_fwd(projlonlatin,self.projpj) + * + * if errcheck and pj_errno != 0: # <<<<<<<<<<<<<< + * raise RuntimeError(pj_strerrno(pj_errno)) + * # since HUGE_VAL can be 'inf', + */ + __pyx_2 = __pyx_v_errcheck; + Py_INCREF(__pyx_2); + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 112; goto __pyx_L1;} + if (__pyx_1) { + Py_DECREF(__pyx_2); __pyx_2 = 0; + __pyx_2 = __Pyx_PyBool_FromLong((pj_errno != 0)); if (unlikely(!__pyx_2)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 112; goto __pyx_L1;} + } + __pyx_1 = __Pyx_PyObject_IsTrue(__pyx_2); if (unlikely(__pyx_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 112; goto __pyx_L1;} + Py_DECREF(__pyx_2); __pyx_2 = 0; + if (__pyx_1) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":113 + * + * if errcheck and pj_errno != 0: + * raise RuntimeError(pj_strerrno(pj_errno)) # <<<<<<<<<<<<<< + * # since HUGE_VAL can be 'inf', + * # change it to a real (but very large) number. + */ + __pyx_3 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 113; goto __pyx_L1;} + __pyx_4 = PyTuple_New(1); if (unlikely(!__pyx_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 113; goto __pyx_L1;} + PyTuple_SET_ITEM(__pyx_4, 0, __pyx_3); + __pyx_3 = 0; + __pyx_2 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_4); if (unlikely(!__pyx_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 113; goto __pyx_L1;} + Py_DECREF(__pyx_4); __pyx_4 = 0; + __Pyx_Raise(__pyx_2, 0, 0); + Py_DECREF(__pyx_2); __pyx_2 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 113; goto __pyx_L1;} + goto __pyx_L5; + } + __pyx_L5:; + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":116 + * # since HUGE_VAL can be 'inf', + * # change it to a real (but very large) number. + * if projxyout.u == HUGE_VAL: # <<<<<<<<<<<<<< + * llptr[i].u = 1.e30 + * else: + */ + __pyx_1 = (__pyx_v_projxyout.u == HUGE_VAL); + if (__pyx_1) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":117 + * # change it to a real (but very large) number. 
+ * if projxyout.u == HUGE_VAL: + * llptr[i].u = 1.e30 # <<<<<<<<<<<<<< + * else: + * llptr[i].u = projxyout.u + */ + (__pyx_v_llptr[__pyx_v_i]).u = 1.e30; + goto __pyx_L6; + } + /*else*/ { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":119 + * llptr[i].u = 1.e30 + * else: + * llptr[i].u = projxyout.u # <<<<<<<<<<<<<< + * if projxyout.v == HUGE_VAL: + * llptr[i].u = 1.e30 + */ + (__pyx_v_llptr[__pyx_v_i]).u = __pyx_v_projxyout.u; + } + __pyx_L6:; + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":120 + * else: + * llptr[i].u = projxyout.u + * if projxyout.v == HUGE_VAL: # <<<<<<<<<<<<<< + * llptr[i].u = 1.e30 + * else: + */ + __pyx_1 = (__pyx_v_projxyout.v == HUGE_VAL); + if (__pyx_1) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":121 + * llptr[i].u = projxyout.u + * if projxyout.v == HUGE_VAL: + * llptr[i].u = 1.e30 # <<<<<<<<<<<<<< + * else: + * llptr[i].v = projxyout.v + */ + (__pyx_v_llptr[__pyx_v_i]).u = 1.e30; + goto __pyx_L7; + } + /*else*/ { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":123 + * llptr[i].u = 1.e30 + * else: + * llptr[i].v = projxyout.v # <<<<<<<<<<<<<< + * + * def _invn(self, c_numpy.ndarray xy, radians=False, errcheck=False): + */ + (__pyx_v_llptr[__pyx_v_i]).v = __pyx_v_projxyout.v; + } + __pyx_L7:; + } + + __pyx_r = Py_None; Py_INCREF(Py_None); + goto __pyx_L0; + __pyx_L1:; + Py_XDECREF(__pyx_2); + Py_XDECREF(__pyx_3); + Py_XDECREF(__pyx_4); + __Pyx_AddTraceback("_proj.Proj._fwdn"); + __pyx_r = 0; + __pyx_L0:; + Py_DECREF(__pyx_v_self); + Py_DECREF(__pyx_v_lonlat); + Py_DECREF(__pyx_v_radians); + Py_DECREF(__pyx_v_errcheck); + return __pyx_r; +} + +static PyObject *__pyx_f_py_5_proj_4Proj__invn(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ +static char __pyx_doc_5_proj_4Proj__invn[] = "\n inverse transformation - x,y to lons,lats (done in place).\n Uses ndarray of shape ...,2.\n if radians=True, lons/lats are radians instead of 
degrees.\n if errcheck=True, an exception is raised if the inverse transformation is invalid.\n if errcheck=False and the inverse transformation is invalid, no exception is\n raised and 1.e30 is returned.\n "; +static PyObject *__pyx_f_py_5_proj_4Proj__invn(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { + PyArrayObject *__pyx_v_xy = 0; + PyObject *__pyx_v_radians = 0; + PyObject *__pyx_v_errcheck = 0; + projUV __pyx_v_projxyin; + projUV __pyx_v_projlonlatout; + projUV (*__pyx_v_llptr); + Py_ssize_t __pyx_v_npts; + Py_ssize_t __pyx_v_i; + PyObject *__pyx_r; + PyObject *__pyx_1 = 0; + int __pyx_2; + PyObject *__pyx_3 = 0; + PyObject *__pyx_4 = 0; + double __pyx_5; + static char *__pyx_argnames[] = {"xy","radians","errcheck",0}; + __pyx_v_radians = __pyx_k8; + __pyx_v_errcheck = __pyx_k9; + if (unlikely(!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "O|OO", __pyx_argnames, &__pyx_v_xy, &__pyx_v_radians, &__pyx_v_errcheck))) return 0; + Py_INCREF(__pyx_v_self); + Py_INCREF(__pyx_v_xy); + Py_INCREF(__pyx_v_radians); + Py_INCREF(__pyx_v_errcheck); + if (unlikely(!__Pyx_ArgTypeTest(((PyObject *)__pyx_v_xy), __pyx_ptype_7c_numpy_ndarray, 1, "xy"))) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 125; goto __pyx_L1;} + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":137 + * cdef projUV *llptr + * cdef Py_ssize_t npts, i + * npts = c_numpy.PyArray_SIZE(xy)//2 # <<<<<<<<<<<<<< + * llptr = <projUV *>xy.data + * + */ + __pyx_v_npts = (PyArray_SIZE(__pyx_v_xy) / 2); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":138 + * cdef Py_ssize_t npts, i + * npts = c_numpy.PyArray_SIZE(xy)//2 + * llptr = <projUV *>xy.data # <<<<<<<<<<<<<< + * + * for i from 0 <= i < npts: + */ + __pyx_v_llptr = ((projUV (*))__pyx_v_xy->data); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":140 + * llptr = <projUV *>xy.data + * + * for i from 0 <= i < npts: # <<<<<<<<<<<<<< + * projxyin = llptr[i] + * projlonlatout = 
pj_inv(projxyin, self.projpj) + */ + for (__pyx_v_i = 0; __pyx_v_i < __pyx_v_npts; __pyx_v_i++) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":141 + * + * for i from 0 <= i < npts: + * projxyin = llptr[i] # <<<<<<<<<<<<<< + * projlonlatout = pj_inv(projxyin, self.projpj) + * if errcheck and pj_errno != 0: + */ + __pyx_v_projxyin = (__pyx_v_llptr[__pyx_v_i]); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":142 + * for i from 0 <= i < npts: + * projxyin = llptr[i] + * projlonlatout = pj_inv(projxyin, self.projpj) # <<<<<<<<<<<<<< + * if errcheck and pj_errno != 0: + * raise RuntimeError(pj_strerrno(pj_errno)) + */ + __pyx_v_projlonlatout = pj_inv(__pyx_v_projxyin,((struct __pyx_obj_5_proj_Proj *)__pyx_v_self)->projpj); + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":143 + * projxyin = llptr[i] + * projlonlatout = pj_inv(projxyin, self.projpj) + * if errcheck and pj_errno != 0: # <<<<<<<<<<<<<< + * raise RuntimeError(pj_strerrno(pj_errno)) + * # since HUGE_VAL can be 'inf', + */ + __pyx_1 = __pyx_v_errcheck; + Py_INCREF(__pyx_1); + __pyx_2 = __Pyx_PyObject_IsTrue(__pyx_1); if (unlikely(__pyx_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; goto __pyx_L1;} + if (__pyx_2) { + Py_DECREF(__pyx_1); __pyx_1 = 0; + __pyx_1 = __Pyx_PyBool_FromLong((pj_errno != 0)); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; goto __pyx_L1;} + } + __pyx_2 = __Pyx_PyObject_IsTrue(__pyx_1); if (unlikely(__pyx_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; goto __pyx_L1;} + Py_DECREF(__pyx_1); __pyx_1 = 0; + if (__pyx_2) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":144 + * projlonlatout = pj_inv(projxyin, self.projpj) + * if errcheck and pj_errno != 0: + * raise RuntimeError(pj_strerrno(pj_errno)) # <<<<<<<<<<<<<< + * # since HUGE_VAL can be 'inf', + * # change it to a real (but very large) number. 
+ */ + __pyx_1 = PyString_FromString(pj_strerrno(pj_errno)); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; goto __pyx_L1;} + __pyx_3 = PyTuple_New(1); if (unlikely(!__pyx_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; goto __pyx_L1;} + PyTuple_SET_ITEM(__pyx_3, 0, __pyx_1); + __pyx_1 = 0; + __pyx_1 = PyObject_CallObject(__pyx_builtin_RuntimeError, __pyx_3); if (unlikely(!__pyx_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; goto __pyx_L1;} + Py_DECREF(__pyx_3); __pyx_3 = 0; + __Pyx_Raise(__pyx_1, 0, 0); + Py_DECREF(__pyx_1); __pyx_1 = 0; + {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; goto __pyx_L1;} + goto __pyx_L4; + } + __pyx_L4:; + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":147 + * # since HUGE_VAL can be 'inf', + * # change it to a real (but very large) number. + * if projlonlatout.u == HUGE_VAL: # <<<<<<<<<<<<<< + * llptr[i].u = 1.e30 + * elif radians: + */ + __pyx_2 = (__pyx_v_projlonlatout.u == HUGE_VAL); + if (__pyx_2) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":148 + * # change it to a real (but very large) number. 
+ * if projlonlatout.u == HUGE_VAL: + * llptr[i].u = 1.e30 # <<<<<<<<<<<<<< + * elif radians: + * llptr[i].u = projlonlatout.u + */ + (__pyx_v_llptr[__pyx_v_i]).u = 1.e30; + goto __pyx_L5; + } + __pyx_2 = __Pyx_PyObject_IsTrue(__pyx_v_radians); if (unlikely(__pyx_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 149; goto __pyx_L1;} + if (__pyx_2) { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":150 + * llptr[i].u = 1.e30 + * elif radians: + * llptr[i].u = projlonlatout.u # <<<<<<<<<<<<<< + * else: + * llptr[i].u = _rad2dg*projlonlatout.u + */ + (__pyx_v_llptr[__pyx_v_i]).u = __pyx_v_projlonlatout.u; + goto __pyx_L5; + } + /*else*/ { + + /* "/home/efiring/programs/py/mpl/basemap.dev/src/_proj.pyx":152 + * llptr[i].u = projlonlatout.u + * else: + * llptr[i].u = _rad2dg*projlonlatout.u # <<<<<<<<<<<<<< + * if projlonlatout.v == HUGE_VAL: + * llptr[i].v = 1.e30 + */ + __pyx_3 = __Pyx_GetName(__pyx_m, __pyx_n__rad2dg); if (unlikely(!__pyx_3)) {__py... [truncated message content] |
From: <js...@us...> - 2007-11-22 15:28:32
|
Revision: 4418 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4418&view=rev Author: jswhit Date: 2007-11-22 07:28:30 -0800 (Thu, 22 Nov 2007) Log Message: ----------- changed version number to 0.9.6.1 Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-22 03:53:04 UTC (rev 4417) +++ trunk/toolkits/basemap/Changelog 2007-11-22 15:28:30 UTC (rev 4418) @@ -1,4 +1,4 @@ -version 0.9.7 (not yet released) +version 0.9.6.1 (not yet released) * fix rotate_vector so it works in S. Hem and for non-orthogonal grids. Support for masked velocity vectors also added. (EF) * numpification. (EF) Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-22 03:53:04 UTC (rev 4417) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-22 15:28:30 UTC (rev 4418) @@ -22,7 +22,7 @@ # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) -__version__ = '0.9.7' +__version__ = '0.9.6.1' Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-11-22 03:53:04 UTC (rev 4417) +++ trunk/toolkits/basemap/setup.py 2007-11-22 15:28:30 UTC (rev 4418) @@ -93,7 +93,7 @@ package_data = {'matplotlib.toolkits.basemap':pyproj_datafiles+basemap_datafiles} setup( name = "basemap", - version = "0.9.7", + version = "0.9.6.1", description = "Plot data on map projections with matplotlib", long_description = """ An add-on toolkit for matplotlib that lets you plot data This was sent by 
the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-22 16:17:06
|
Revision: 4422 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4422&view=rev Author: jswhit Date: 2007-11-22 08:17:03 -0800 (Thu, 22 Nov 2007) Log Message: ----------- fix quiver_demo Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/examples/quiver_demo.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-22 15:45:42 UTC (rev 4421) +++ trunk/toolkits/basemap/Changelog 2007-11-22 16:17:03 UTC (rev 4422) @@ -1,4 +1,4 @@ -version 0.9.7 (svn revision 4421) +version 0.9.7 (svn revision 4422) * fixed bug in drawlsmask for 'moll','robin' and 'sinu' projections. * added lake_color keyword to fillcontinents. Modified: trunk/toolkits/basemap/examples/quiver_demo.py =================================================================== --- trunk/toolkits/basemap/examples/quiver_demo.py 2007-11-22 15:45:42 UTC (rev 4421) +++ trunk/toolkits/basemap/examples/quiver_demo.py 2007-11-22 16:17:03 UTC (rev 4422) @@ -1,6 +1,7 @@ from matplotlib.toolkits.basemap import Basemap -from pylab import show, title, arange, meshgrid, cm, figure, sqrt, \ - colorbar, axes, gca, reshape, array, Float32, quiverkey +import numpy +from pylab import show, title, arange, meshgrid, cm, figure, \ + colorbar, axes, gca, reshape, quiverkey # read in data. file = open('fcover.dat','r') @@ -12,9 +13,9 @@ ul.append(float(l[0])) vl.append(float(l[1])) pl.append(float(l[2])) -u = reshape(array(ul,Float32),(nlats,nlons)) -v = reshape(array(vl,Float32),(nlats,nlons)) -p = reshape(array(pl,Float32),(nlats,nlons)) +u = reshape(numpy.array(ul,numpy.float32),(nlats,nlons)) +v = reshape(numpy.array(vl,numpy.float32),(nlats,nlons)) +p = reshape(numpy.array(pl,numpy.float32),(nlats,nlons)) lats1 = -90.+dellat*arange(nlats) lons1 = -180.+dellon*arange(nlons) lons, lats = meshgrid(lons1, lats1) @@ -61,7 +62,7 @@ # and interpolation). 
nxv = 41; nyv = 41 nxp = 101; nyp = 101 -spd = sqrt(u**2+v**2) +spd = numpy.sqrt(u**2+v**2) udat, vdat, xv, yv = m.transform_vector(u,v,lons1,lats1,nxv,nyv,returnxy=True) pdat, xp, yp = m.transform_scalar(p,lons1,lats1,nxp,nyp,returnxy=True) # create a figure, add an axes. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-23 18:07:44
|
Revision: 4429 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4429&view=rev Author: jswhit Date: 2007-11-23 10:07:27 -0800 (Fri, 23 Nov 2007) Log Message: ----------- added 'sstanom' colormap. Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/cm.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-23 17:54:40 UTC (rev 4428) +++ trunk/toolkits/basemap/Changelog 2007-11-23 18:07:27 UTC (rev 4429) @@ -1,3 +1,5 @@ + * added 'sstanom' colormap from + http://www.ghrsst-pp.org/GHRSST-PP-Data-Tools.html version 0.9.7 (svn revision 4422) * fixed bug in drawlsmask for 'moll','robin' and 'sinu' projections. Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/cm.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/cm.py 2007-11-23 17:54:40 UTC (rev 4428) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/cm.py 2007-11-23 18:07:27 UTC (rev 4429) @@ -45,6 +45,8 @@ s3pcpn = colors.LinearSegmentedColormap('s3pcpn', _s3pcpn_data, _LUTSIZE) s3pcpn_l = colors.LinearSegmentedColormap('s3pcpn_l', _s3pcpn_l_data, _LUTSIZE) StepSeq = colors.LinearSegmentedColormap('StepSeq', _StepSeq_data, _LUTSIZE) +_sstanom_data = {'blue': [(0.0, 0.858823537827, 0.858823537827), (0.0243902429938, 0.835294127464, 0.835294127464), (0.0487804859877, 0.815686285496, 0.815686285496), (0.0731707289815, 0.78823530674, 0.78823530674), (0.0975609719753, 0.819607853889, 0.819607853889), (0.12195122242, 0.882352948189, 0.882352948189), (0.146341457963, 0.945098042488, 0.945098042488), (0.170731708407, 1.0, 1.0), (0.195121943951, 1.0, 1.0), (0.219512194395, 1.0, 1.0), (0.243902444839, 1.0, 1.0), (0.268292695284, 0.886274516582, 0.886274516582), (0.292682915926, 0.745098054409, 0.745098054409), (0.31707316637, 0.603921592236, 
0.603921592236), (0.341463416815, 0.51372551918, 0.51372551918), (0.365853667259, 0.552941203117, 0.552941203117), (0.390243887901, 0.588235318661, 0.588235318661), (0.414634138346, 0.623529434204, 0.623529434204), (0.43902438879, 0.658823549747, 0.658823549747), (0.463414639235, 0.694117665291, 0.694117665291), (0.487804889679, 0.729411780834, 0.729411780834), (0.512195110321, 0.717647075653, 0.717647075653), (0.536585390568, 0.658823549747, 0.658823549747), (0.56097561121, 0.603921592236, 0.603921592236), (0.585365831852, 0.537254929543, 0.537254929543), (0.609756112099, 0.380392163992, 0.380392163992), (0.634146332741, 0.227450981736, 0.227450981736), (0.658536612988, 0.0431372560561, 0.0431372560561), (0.68292683363, 0.0, 0.0), (0.707317054272, 0.0, 0.0), (0.731707334518, 0.0, 0.0), (0.756097555161, 0.0, 0.0), (0.780487775803, 0.0, 0.0), (0.804878056049, 0.0, 0.0), (0.829268276691, 0.0, 0.0), (0.853658556938, 0.145098045468, 0.145098045468), (0.87804877758, 0.309803932905, 0.309803932905), (0.902438998222, 0.478431373835, 0.478431373835), (0.926829278469, 0.529411792755, 0.529411792755), (0.951219499111, 0.333333343267, 0.333333343267), (0.975609779358, 0.168627455831, 0.168627455831), (1.0, 0.0, 0.0)], 'green': [(0.0, 0.0, 0.0), (0.0243902429938, 0.0, 0.0), (0.0487804859877, 0.0, 0.0), (0.0731707289815, 0.0, 0.0), (0.0975609719753, 0.0941176488996, 0.0941176488996), (0.12195122242, 0.235294118524, 0.235294118524), (0.146341457963, 0.380392163992, 0.380392163992), (0.170731708407, 0.521568655968, 0.521568655968), (0.195121943951, 0.662745118141, 0.662745118141), (0.219512194395, 0.827450990677, 0.827450990677), (0.243902444839, 0.96862745285, 0.96862745285), (0.268292695284, 1.0, 1.0), (0.292682915926, 1.0, 1.0), (0.31707316637, 1.0, 1.0), (0.341463416815, 1.0, 1.0), (0.365853667259, 1.0, 1.0), (0.390243887901, 1.0, 1.0), (0.414634138346, 1.0, 1.0), (0.43902438879, 0.933333337307, 0.933333337307), (0.463414639235, 0.86274510622, 0.86274510622), (0.487804889679, 
0.792156875134, 0.792156875134), (0.512195110321, 0.792156875134, 0.792156875134), (0.536585390568, 0.86274510622, 0.86274510622), (0.56097561121, 0.933333337307, 0.933333337307), (0.585365831852, 0.996078431606, 0.996078431606), (0.609756112099, 0.92549020052, 0.92549020052), (0.634146332741, 0.854901969433, 0.854901969433), (0.658536612988, 0.772549033165, 0.772549033165), (0.68292683363, 0.701960802078, 0.701960802078), (0.707317054272, 0.631372570992, 0.631372570992), (0.731707334518, 0.556862771511, 0.556862771511), (0.756097555161, 0.470588237047, 0.470588237047), (0.780487775803, 0.329411774874, 0.329411774874), (0.804878056049, 0.160784319043, 0.160784319043), (0.829268276691, 0.0196078438312, 0.0196078438312), (0.853658556938, 0.0, 0.0), (0.87804877758, 0.0, 0.0), (0.902438998222, 0.0, 0.0), (0.926829278469, 0.0, 0.0), (0.951219499111, 0.0, 0.0), (0.975609779358, 0.0, 0.0), (1.0, 0.0, 0.0)], 'red': [(0.0, 0.419607847929, 0.419607847929), (0.0243902429938, 0.478431373835, 0.478431373835), (0.0487804859877, 0.541176497936, 0.541176497936), (0.0731707289815, 0.611764729023, 0.611764729023), (0.0975609719753, 0.51372551918, 0.51372551918), (0.12195122242, 0.333333343267, 0.333333343267), (0.146341457963, 0.152941182256, 0.152941182256), (0.170731708407, 0.0, 0.0), (0.195121943951, 0.0, 0.0), (0.219512194395, 0.0, 0.0), (0.243902444839, 0.0, 0.0), (0.268292695284, 0.113725490868, 0.113725490868), (0.292682915926, 0.254901975393, 0.254901975393), (0.31707316637, 0.40000000596, 0.40000000596), (0.341463416815, 0.521568655968, 0.521568655968), (0.365853667259, 0.603921592236, 0.603921592236), (0.390243887901, 0.678431391716, 0.678431391716), (0.414634138346, 0.749019622803, 0.749019622803), (0.43902438879, 0.752941191196, 0.752941191196), (0.463414639235, 0.749019622803, 0.749019622803), (0.487804889679, 0.749019622803, 0.749019622803), (0.512195110321, 0.792156875134, 0.792156875134), (0.536585390568, 0.86274510622, 0.86274510622), (0.56097561121, 0.933333337307, 
0.933333337307), (0.585365831852, 1.0, 1.0), (0.609756112099, 1.0, 1.0), (0.634146332741, 1.0, 1.0), (0.658536612988, 1.0, 1.0), (0.68292683363, 1.0, 1.0), (0.707317054272, 1.0, 1.0), (0.731707334518, 1.0, 1.0), (0.756097555161, 1.0, 1.0), (0.780487775803, 1.0, 1.0), (0.804878056049, 1.0, 1.0), (0.829268276691, 1.0, 1.0), (0.853658556938, 0.964705884457, 0.964705884457), (0.87804877758, 0.92549020052, 0.92549020052), (0.902438998222, 0.890196084976, 0.890196084976), (0.926829278469, 0.827450990677, 0.827450990677), (0.951219499111, 0.705882370472, 0.705882370472), (0.975609779358, 0.603921592236, 0.603921592236), (1.0, 0.501960813999, 0.501960813999)]} +sstanom = colors.LinearSegmentedColormap('sstanom', _sstanom_data, _LUTSIZE) datad={} datad['GMT_drywet']=_GMT_drywet_data datad['GMT_gebco']=_GMT_gebco_data @@ -61,6 +63,7 @@ datad['s3pcpn']=_s3pcpn_data datad['s3pcpn_l']=_s3pcpn_l_data datad['StepSeq']=_StepSeq_data +datad['sstanom']=_sstanom_data # reverse all the colormaps. # reversed colormaps have '_r' appended to the name. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-24 13:25:09
|
Revision: 4430 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4430&view=rev Author: jswhit Date: 2007-11-24 05:25:06 -0800 (Sat, 24 Nov 2007) Log Message: ----------- use drawmapboundary to fill map projection region a specified color. Useful for painting ocean areas (instead of setting axis background color, which only works for rectangular projections). Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/examples/customticks.py trunk/toolkits/basemap/examples/hires.py trunk/toolkits/basemap/examples/ortho_demo.py trunk/toolkits/basemap/examples/randompoints.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/Changelog 2007-11-24 13:25:06 UTC (rev 4430) @@ -1,3 +1,5 @@ + * add 'fill_color' option to drawmapboundary, to optionally + fill the map projection background a certain color. * added 'sstanom' colormap from http://www.ghrsst-pp.org/GHRSST-PP-Data-Tools.html version 0.9.7 (svn revision 4422) Modified: trunk/toolkits/basemap/examples/customticks.py =================================================================== --- trunk/toolkits/basemap/examples/customticks.py 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/examples/customticks.py 2007-11-24 13:25:06 UTC (rev 4430) @@ -20,17 +20,17 @@ # create figure. fig=pylab.figure() -# background color will be used for 'wet' areas. -fig.add_axes([0.1,0.1,0.8,0.8],axisbg='aqua') # create Basemap instance (regular lat/lon projection). # suppress_ticks=False allows custom axes ticks to be used # Ticks are suppressed by default, so Basemap methods # drawparallels and drawmeridians used to draw labelled lat/lon grid. 
m = Basemap(llcrnrlon=-156.5,llcrnrlat=18.75,urcrnrlon=-154.5,urcrnrlat=20.5, resolution='h',projection='cyl',suppress_ticks=False) -# draw coastlines, fill land areas. +# draw coastlines, fill land and lake areas. m.drawcoastlines() -m.fillcontinents(color="coral") +m.fillcontinents(color='coral',lake_color='aqua') +# background color will be used for oceans. +m.drawmapboundary(fill_color='aqua') # get axes instance. ax = pylab.gca() # add custom ticks. Modified: trunk/toolkits/basemap/examples/hires.py =================================================================== --- trunk/toolkits/basemap/examples/hires.py 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/examples/hires.py 2007-11-24 13:25:06 UTC (rev 4430) @@ -21,15 +21,18 @@ # clear the figure clf() -ax = fig.add_axes([0.1,0.1,0.8,0.8],axisbg='aqua') # read cPickle back in and plot it again (should be much faster). t1 = time.clock() m2 = cPickle.load(open('map.pickle','rb')) # draw coastlines and fill continents. m.drawcoastlines() -m.fillcontinents(color='coral') +# fill continents and lakes +m.fillcontinents(color='coral',lake_color='aqua') # draw political boundaries. m.drawcountries(linewidth=1) +# fill map projection region light blue (this will +# paint ocean areas same color as lakes). +m.drawmapboundary(fill_color='aqua') # draw major rivers. m.drawrivers(color='b') print time.clock()-t1,' secs to plot using using a pickled Basemap instance' Modified: trunk/toolkits/basemap/examples/ortho_demo.py =================================================================== --- trunk/toolkits/basemap/examples/ortho_demo.py 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/examples/ortho_demo.py 2007-11-24 13:25:06 UTC (rev 4430) @@ -23,11 +23,11 @@ fig = figure() m = Basemap(projection='ortho',lon_0=lon_0,lat_0=lat_0,resolution='l') m.drawcoastlines() -m.fillcontinents(color='coral') +m.fillcontinents(color='coral',lake_color='aqua') m.drawcountries() # draw parallels and meridians. 
m.drawparallels(arange(-90.,120.,30.)) m.drawmeridians(arange(0.,420.,60.)) -m.drawmapboundary() +m.drawmapboundary(fill_color='aqua') title('Orthographic Map Centered on Lon=%s, Lat=%s' % (lon_0,lat_0)) show() Modified: trunk/toolkits/basemap/examples/randompoints.py =================================================================== --- trunk/toolkits/basemap/examples/randompoints.py 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/examples/randompoints.py 2007-11-24 13:25:06 UTC (rev 4430) @@ -24,8 +24,6 @@ # plot them as filled circles on the map. # first, create a figure. fig=figure() -# background color will be used for 'wet' areas. -fig.add_axes([0.1,0.1,0.8,0.8],axisbg='aqua') # draw colored markers. # use zorder=10 to make sure markers are drawn last. # (otherwise they are covered up when continents are filled) @@ -39,9 +37,11 @@ if xpt > m.xmin and xpt < m.xmax and ypt > m.ymin and ypt < m.ymax: hexcolor = rgb2hex(cm.jet(zval/100.)[:3]) text(xpt,ypt,numstr,fontsize=9,weight='bold',color=hexcolor) -# draw coasts and fill continents. +# draw coasts and fill continents/lakes. m.drawcoastlines(linewidth=0.5) -m.fillcontinents(color='coral') +m.fillcontinents(color='coral',lake_color='aqua') +# color ocean areas +m.drawmapboundary(fill_color='aqua') # draw parallels and meridians. delat = 20. circles = arange(0.,90.,delat).tolist()+\ Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-23 18:07:27 UTC (rev 4429) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-24 13:25:06 UTC (rev 4430) @@ -962,11 +962,20 @@ return boundaryll, boundaryxy - def drawmapboundary(self,color='k',linewidth=1.0,ax=None): + def drawmapboundary(self,color='k',linewidth=1.0,fill_color=None,\ + zorder=None,ax=None): """ - draw boundary around map projection region. 
If ax=None (default), - default axis instance is used, otherwise specified axis - instance is used. + draw boundary around map projection region, optionally + filling interior of region. + + linewidth - line width for boundary (default 1.) + color - color of boundary line (default black) + fill_color - fill the map region background with this + color (default is no fill or fill with axis background color). + zorder - sets the zorder for filling map background + (default 0). + ax - axes instance to use (default None, use default axes + instance). """ # get current axes instance (if none specified). if ax is None and self.ax is None: @@ -981,18 +990,30 @@ # define a circle patch, add it to axes instance. circle = Circle((self.rmajor,self.rmajor),self.rmajor) ax.add_patch(circle) - circle.set_fill(False) + if fill_color is None: + circle.set_fill(False) + else: + circle.set_facecolor(fill_color) + circle.set_zorder(0) circle.set_edgecolor(color) circle.set_linewidth(linewidth) circle.set_clip_on(False) + if zorder is not None: + circle.set_zorder(zorder) elif self.projection == 'geos' and self._fulldisk: # elliptical region # define an Ellipse patch, add it to axes instance. ellps = Ellipse((self._width,self._height),2.*self._width,2.*self._height) ax.add_patch(ellps) - ellps.set_fill(False) + if fill_color is None: + ellps.set_fill(False) + else: + ellps.set_facecolor(fill_color) + ellps.set_zorder(0) ellps.set_edgecolor(color) ellps.set_linewidth(linewidth) ellps.set_clip_on(False) + if zorder is not None: + ellps.set_zorder(0) elif self.projection in ['moll','robin','sinu']: # elliptical region. nx = 100; ny = 100 # quasi-elliptical region. 
@@ -1015,13 +1036,25 @@ xy = zip(x,y) poly = Polygon(xy,edgecolor=color,linewidth=linewidth) ax.add_patch(poly) - poly.set_fill(False) + if fill_color is None: + poly.set_fill(False) + else: + poly.set_facecolor(fill_color) + poly.set_zorder(0) poly.set_clip_on(False) + if zorder is not None: + poly.set_zorder(zorder) else: # all other projections are rectangular. ax.axesPatch.set_linewidth(linewidth) - ax.axesPatch.set_facecolor(ax.get_axis_bgcolor()) + if fill_color is None: + ax.axesPatch.set_facecolor(ax.get_axis_bgcolor()) + else: + ax.axesPatch.set_facecolor(fill_color) + ax.axesPatch.set_zorder(0) ax.axesPatch.set_edgecolor(color) ax.set_frame_on(True) + if zorder is not None: + ax.axesPatch.set_zorder(zorder) # set axes limits to fit map region. self.set_axes_limits(ax=ax) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-26 18:30:20
|
Revision: 4449 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4449&view=rev Author: jswhit Date: 2007-11-26 10:29:14 -0800 (Mon, 26 Nov 2007) Log Message: ----------- add the ability to read remote datasets over http using the opendap client by Roberto D'Almeida (included). NetCDFFile modified so that it uses the opendap client if the filename begins with 'http'. Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/examples/fcstmaps.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/pupynere.py trunk/toolkits/basemap/setup.py Added Paths: ----------- trunk/toolkits/basemap/lib/dap/ trunk/toolkits/basemap/lib/dap/__init__.py trunk/toolkits/basemap/lib/dap/client.py trunk/toolkits/basemap/lib/dap/dtypes.py trunk/toolkits/basemap/lib/dap/exceptions.py trunk/toolkits/basemap/lib/dap/helper.py trunk/toolkits/basemap/lib/dap/lib.py trunk/toolkits/basemap/lib/dap/parsers/ trunk/toolkits/basemap/lib/dap/parsers/__init__.py trunk/toolkits/basemap/lib/dap/parsers/das.py trunk/toolkits/basemap/lib/dap/parsers/dds.py trunk/toolkits/basemap/lib/dap/util/ trunk/toolkits/basemap/lib/dap/util/__init__.py trunk/toolkits/basemap/lib/dap/util/filter.py trunk/toolkits/basemap/lib/dap/util/http.py trunk/toolkits/basemap/lib/dap/util/ordereddict.py trunk/toolkits/basemap/lib/dap/util/safeeval.py trunk/toolkits/basemap/lib/dap/util/wsgi_intercept.py trunk/toolkits/basemap/lib/dap/xdr.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-26 17:23:18 UTC (rev 4448) +++ trunk/toolkits/basemap/Changelog 2007-11-26 18:29:14 UTC (rev 4449) @@ -1,3 +1,5 @@ + * modify NetCDFFile to use dap module to read remote + datasets over http. Include dap module. * modify NetCDFFile to automatically apply scale_factor and add_offset, and return masked arrays masked where data == missing_value or _FillValue. 
Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2007-11-26 17:23:18 UTC (rev 4448) +++ trunk/toolkits/basemap/MANIFEST.in 2007-11-26 18:29:14 UTC (rev 4449) @@ -74,6 +74,9 @@ include pyshapelib/shapelib/*.c pyshapelib/shapelib/*.h include MANIFEST.in recursive-include geos-2.2.3 * +recursive-include lib/dap * +recursive-include lib/dbflib * +recursive-include lib/shapelib * include lib/matplotlib/toolkits/basemap/data/5minmask.bin include lib/matplotlib/toolkits/basemap/data/GL27 include lib/matplotlib/toolkits/basemap/data/countries_c.dat Modified: trunk/toolkits/basemap/examples/fcstmaps.py =================================================================== --- trunk/toolkits/basemap/examples/fcstmaps.py 2007-11-26 17:23:18 UTC (rev 4448) +++ trunk/toolkits/basemap/examples/fcstmaps.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -1,19 +1,14 @@ # this example reads today's numerical weather forecasts # from the NOAA OpenDAP servers and makes a multi-panel plot. -# Requires the pyDAP module (a pure-python module) -# from http://pydap.org, and an active intenet connection. - -try: - from dap import client -except: - raise ImportError,"requires pyDAP module (version 2.1 or higher) from http://pydap.org" -from pylab import title, show, figure, cm, arange, frange, figtext, \ - meshgrid, axes, colorbar, where, amin, amax, around +from pylab import title, show, figure, cm, figtext, \ + meshgrid, axes, colorbar +import numpy import sys from matplotlib.numerix import ma import datetime -from matplotlib.toolkits.basemap import Basemap +from matplotlib.toolkits.basemap import Basemap, NetCDFFile, addcyclic + hrsgregstart = 13865688 # hrs from 00010101 to 15821015 in Julian calendar. # times in many datasets use mixed Gregorian/Julian calendar, datetime # module uses a proleptic Gregorian calendar. 
So, I use datetime to compute @@ -48,12 +43,11 @@ YYYYMM = YYYYMMDD[0:6] # set OpenDAP server URL. -HH='09' -URLbase="http://nomad3.ncep.noaa.gov:9090/dods/sref/sref" -URL=URLbase+YYYYMMDD+"/sref_eta_ctl1_"+HH+"z" +URLbase="http://nomad3.ncep.noaa.gov:9090/dods/mrf/mrf" +URL=URLbase+YYYYMMDD+'/mrf'+YYYYMMDD print URL+'\n' try: - data = client.open(URL) + data = NetCDFFile(URL) except: msg = """ opendap server not providing the requested data. @@ -61,14 +55,14 @@ raise IOError, msg -# read levels, lats,lons,times. +# read lats,lons,times. -print data.keys() -levels = data['lev'] -latitudes = data['lat'] -longitudes = data['lon'] -fcsttimes = data['time'] -times = fcsttimes[:] +print data.variables.keys() +latitudes = data.variables['lat'] +longitudes = data.variables['lon'] +fcsttimes = data.variables['time'] +times = fcsttimes[0:6] # first 6 forecast times. +ntimes = len(times) # put forecast times in YYYYMMDDHH format. verifdates = [] fcsthrs=[] @@ -79,55 +73,43 @@ verifdates.append(fdate.strftime('%Y%m%d%H')) print fcsthrs print verifdates -levs = levels[:] lats = latitudes[:] -lons = longitudes[:] -lons, lats = meshgrid(lons,lats) +nlats = len(lats) +lons1 = longitudes[:] +nlons = len(lons1) # unpack 2-meter temp forecast data. -t2mvar = data['tmp2m'] -missval = t2mvar.missing_value -t2m = t2mvar[:,:,:] -if missval < 0: - t2m = ma.masked_values(where(t2m>-1.e20,t2m,1.e20), 1.e20) -else: - t2m = ma.masked_values(where(t2m<1.e20,t2m,1.e20), 1.e20) -t2min = amin(t2m.compressed()); t2max= amax(t2m.compressed()) -print t2min,t2max -clevs = frange(around(t2min/10.)*10.-5.,around(t2max/10.)*10.+5.,4) -print clevs[0],clevs[-1] -llcrnrlat = 22.0 -urcrnrlat = 48.0 -latminout = 22.0 -llcrnrlon = -125.0 -urcrnrlon = -60.0 -standardpar = 50.0 -centerlon=-105. -# create Basemap instance for Lambert Conformal Conic projection. 
-m = Basemap(llcrnrlon=llcrnrlon,llcrnrlat=llcrnrlat, - urcrnrlon=urcrnrlon,urcrnrlat=urcrnrlat, +t2mvar = data.variables['tmp2m'] +t2min = t2mvar[0:ntimes,:,:] +t2m = numpy.zeros((ntimes,nlats,nlons+1),t2min.dtype) +# create Basemap instance for Orthographic projection. +m = Basemap(lon_0=-105,lat_0=40, rsphere=6371200., - resolution='l',area_thresh=5000.,projection='lcc', - lat_1=standardpar,lon_0=centerlon) + resolution='c',area_thresh=5000.,projection='ortho') +# add wrap-around point in longitude. +for nt in range(ntimes): + t2m[nt,:,:], lons = addcyclic(t2min[nt,:,:], lons1) +# convert to celsius. +t2m = t2m-273.15 +# contour levels +clevs = numpy.arange(-30,30.1,2.) +lons, lats = meshgrid(lons, lats) x, y = m(lons, lats) # create figure. -fig=figure(figsize=(8,8)) -yoffset = (m.urcrnry-m.llcrnry)/30. -for npanel,fcsthr in enumerate(arange(0,72,12)): - nt = fcsthrs.index(fcsthr) - ax = fig.add_subplot(320+npanel+1) - #cs = m.contour(x,y,t2m[nt,:,:],clevs,colors='k') - cs = m.contourf(x,y,t2m[nt,:,:],clevs,cmap=cm.jet) - m.drawcoastlines() - m.drawstates() +fig=figure(figsize=(6,8)) +# make subplots. +for nt,fcsthr in enumerate(fcsthrs): + ax = fig.add_subplot(321+nt) + cs = m.contourf(x,y,t2m[nt,:,:],clevs,cmap=cm.jet,extend='both') + m.drawcoastlines(linewidth=0.5) m.drawcountries() - m.drawparallels(arange(25,75,20),labels=[1,0,0,0],fontsize=8,fontstyle='oblique') - m.drawmeridians(arange(-140,0,20),labels=[0,0,0,1],fontsize=8,yoffset=yoffset,fontstyle='oblique') + m.drawparallels(numpy.arange(-80,81,20)) + m.drawmeridians(numpy.arange(0,360,20)) # panel title - title(repr(fcsthr)+'-h forecast valid '+verifdates[nt],fontsize=12) + title(repr(fcsthr)+'-h forecast valid '+verifdates[nt],fontsize=9) # figure title -figtext(0.5,0.95,u"2-m temp (\N{DEGREE SIGN}K) forecasts from %s"%verifdates[0], +figtext(0.5,0.95,u"2-m temp (\N{DEGREE SIGN}C) forecasts from %s"%verifdates[0], horizontalalignment='center',fontsize=14) # a single colorbar. 
cax = axes([0.1, 0.03, 0.8, 0.025]) Added: trunk/toolkits/basemap/lib/dap/__init__.py =================================================================== --- trunk/toolkits/basemap/lib/dap/__init__.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/__init__.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,12 @@ +"""A Python implementation of the Data Access Protocol (DAP). + +Pydap is a Python module implementing the Data Access Protocol (DAP) +written from scratch. The module implements a DAP client, allowing +transparent and efficient access to dataset stored in DAP server, and +also implements a DAP server able to serve data from a variety of +formats. + +For more information about the protocol, please check http://opendap.org. +""" + +__import__('pkg_resources').declare_namespace(__name__) Added: trunk/toolkits/basemap/lib/dap/client.py =================================================================== --- trunk/toolkits/basemap/lib/dap/client.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/client.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,67 @@ +__author__ = "Roberto De Almeida <ro...@py...>" + +import dap.lib +from dap.util.http import openurl +from dap.exceptions import ClientError + + +def open(url, cache=None, username=None, password=None, verbose=False): + """Connect to a remote dataset. + + This function opens a dataset stored in a DAP server: + + >>> dataset = open(url, cache=None, username=None, password=None, verbose=False): + + You can specify a cache location (a directory), so that repeated + accesses to the same URL avoid the network. + + The username and password may be necessary if the DAP server requires + authentication. The 'verbose' option will make pydap print all the + URLs that are acessed. + """ + # Set variables on module namespace. 
+ dap.lib.VERBOSE = verbose + + if url.startswith('http'): + for response in [_ddx, _ddsdas]: + dataset = response(url, cache, username, password) + if dataset: return dataset + else: + raise ClientError("Unable to open dataset.") + else: + from dap.plugins.lib import loadhandler + from dap.helper import walk + + # Open a local file. This is a clever hack. :) + handler = loadhandler(url) + dataset = handler._parseconstraints() + + # Unwrap any arrayterators in the dataset. + for var in walk(dataset): + try: var.data = var.data._var + except: pass + + return dataset + + +def _ddsdas(baseurl, cache, username, password): + ddsurl, dasurl = '%s.dds' % baseurl, '%s.das' % baseurl + + # Get metadata. + respdds, dds = openurl(ddsurl, cache, username, password) + respdas, das = openurl(dasurl, cache, username, password) + + if respdds['status'] == '200' and respdas['status'] == '200': + from dap.parsers.dds import DDSParser + from dap.parsers.das import DASParser + + # Build dataset. + dataset = DDSParser(dds, ddsurl, cache, username, password).parse() + + # Add attributes. + dataset = DASParser(das, dasurl, dataset).parse() + return dataset + + +def _ddx(baseurl, cache, username, password): + pass Added: trunk/toolkits/basemap/lib/dap/dtypes.py =================================================================== --- trunk/toolkits/basemap/lib/dap/dtypes.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/dtypes.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,529 @@ +"""DAP variables. + +This module is a Python implementation of the DAP data model. 
+""" + +__author__ = "Roberto De Almeida <ro...@py...>" + +import copy +import itertools + +from dap.lib import quote, to_list, _quote +from dap.util.ordereddict import odict +from dap.util.filter import get_filters + +__all__ = ['StructureType', 'SequenceType', 'DatasetType', 'GridType', 'ArrayType', 'BaseType', + 'Float', 'Float0', 'Float8', 'Float16', 'Float32', 'Float64', 'Int', 'Int0', 'Int8', + 'Int16', 'Int32', 'Int64', 'UInt16', 'UInt32', 'UInt64', 'Byte', 'String', 'Url'] + +_basetypes = ['Float32', 'Float64', 'Int16', 'Int32', 'UInt16', 'UInt32', 'Byte', 'String', 'Url'] +_constructors = ['StructureType', 'SequenceType', 'DatasetType', 'GridType', 'ArrayType'] + +# Constants. +Float = 'Float64' +Float0 = 'Float64' +Float8 = 'Float32' +Float16 = 'Float32' +Float32 = 'Float32' +Float64 = 'Float64' +Int = 'Int32' +Int0 = 'Int32' +Int8 = 'Byte' +Int16 = 'Int16' +Int32 = 'Int32' +Int64 = 'Int32' +UInt16 = 'UInt16' +UInt32 = 'UInt32' +UInt64 = 'UInt32' +UInt8 = 'Byte' +Byte = 'Byte' +String = 'String' +Url = 'Url' + +typemap = { + # numpy + 'd': Float64, + 'f': Float32, + 'l': Int32, + 'b': Byte, + 'h': Int16, + 'q': Int32, + 'H': UInt16, + 'L': UInt32, + 'Q': UInt32, + 'B': Byte, + 'S': String, + } + + +class StructureType(odict): + """Structure contructor. + + A structure is a dict-like object, which can hold other DAP variables. + Structures have a 'data' attribute that combines the data from the + stored variables when read, and propagates the data to the variables + when set. + + This behaviour can be bypassed by setting the '_data' attribute; in + this case, no data is propagated, and further reads do not combine the + data from the stored variables. + """ + + def __init__(self, name='', attributes=None): + odict.__init__(self) + self.name = quote(name) + self.attributes = attributes or {} + + self._id = name + self._filters = [] + self._data = None + + def __iter__(self): + # Iterate over the variables contained in the structure. 
+ return self.itervalues() + + walk = __iter__ + + def __getattr__(self, attr): + # Try to return stored variable. + try: + return self[attr] + except KeyError: + # Try to return attribute from self.attributes. + try: return self.attributes[attr] + except KeyError: raise AttributeError + + def __setitem__(self, key, item): + # Assign a new variable and apply the proper id. + self._dict.__setitem__(key, item) + if key not in self._keys: self._keys.append(key) + + # Ensure that stored objects have the proper id. + item._set_id(self._id) + + def _get_data(self): + if self._data is not None: + return self._data + else: + return [var.data for var in self.values()] + + def _set_data(self, data): + # Propagate the data to the stored variables. + for data_, var in itertools.izip(data, self.values()): + var.data = data_ + + data = property(_get_data, _set_data) + + def _get_id(self): + return self._id + + def _set_id(self, parent=None): + if parent: + self._id = '%s.%s' % (parent, self.name) + else: + self._id = self.name + + # Propagate id to stored variables. + for var in self.values(): var._set_id(self._id) + + id = property(_get_id) # Read-only. + + def _get_filters(self): + return self._filters + + def _set_filters(self, f): + self._filters.append(f) + + # Propagate filter to stored variables. + for var in self.values(): var._set_filters(f) + + filters = property(_get_filters, _set_filters) + + def __copy__(self): + out = self.__class__(name=self.name, attributes=self.attributes.copy()) + out._id = self._id + out._filters = self._filters[:] + out._data = self._data + + # Stored variables *are not* copied. + for k, v in self.items(): + out[k] = v + return out + + def __deepcopy__(self, memo=None, _nil=[]): + out = self.__class__(name=self.name, attributes=self.attributes.copy()) + out._id = self._id + out._filters = self._filters[:] + out._data = self._data + + # Stored variables *are* (deep) copied. 
+ for k, v in self.items(): + out[k] = copy.deepcopy(v) + return out + + +class DatasetType(StructureType): + """Dataset constructor. + + A dataset is very similar to a structure -- the main difference is that + its name is not used when composing the fully qualified name of stored + variables. + """ + + def __setitem__(self, key, item): + self._dict.__setitem__(key, item) + if key not in self._keys: self._keys.append(key) + + # Set the id. Here the parent should be None, since the dataset + # id is not part of the fully qualified name. + item._set_id(None) + + def _set_id(self, parent=None): + self._id = self.name + + # Propagate id. + for var in self.values(): var._set_id(None) + + +class SequenceType(StructureType): + """Sequence constructor. + + A sequence contains ordered data, corresponding to the records in + a sequence of structures with the same stored variables. + """ + # Nesting level. Sequences inside sequences have a level 2, and so on. + level = 1 + + def __setitem__(self, key, item): + # Assign a new variable and apply the proper id. + self._dict.__setitem__(key, item) + if key not in self._keys: self._keys.append(key) + + # Ensure that stored objects have the proper id. + item._set_id(self._id) + + # If the variable is a sequence, set the nesting level. + def set_level(seq, level): + if isinstance(seq, SequenceType): + seq.level = level + for child in seq.walk(): set_level(child, level+1) + set_level(item, self.level+1) + + def walk(self): + # Walk over the variables contained in the structure. + return self.itervalues() + + def _get_data(self): + # This is similar to the structure _get_data method, except that data + # is combined from stored variables using zip(), i.e., grouped values + # from each variable. 
+ if self._data is not None: + return self._data + else: + return _build_data(self.level, *[var.data for var in self.values()]) + + def _set_data(self, data): + for data_, var in itertools.izip(_propagate_data(self.level, data), self.values()): + var.data = data_ + + data = property(_get_data, _set_data) + + def __iter__(self): + """ + When iterating over a sequence, we yield structures containing the + corresponding data (first record, second, etc.). + """ + out = self.__deepcopy__() + + # Set server-side filters. When the sequence is iterated in a + # listcomp/genexp, this function inspects the stack and tries to + # build a server-side filter from the client-side filter. This + # is voodoo black magic, take care. + filters = get_filters(out) + for filter_ in filters: + out._set_filters(filter_) + + for values in out.data: + # Yield a nice structure. + struct_ = StructureType(name=out.name, attributes=out.attributes) + for data, name in zip(values, out.keys()): + var = struct_[name] = out[name].__deepcopy__() + var.data = data + # Set the id. This is necessary since the new structure is not + # contained inside a dataset. + parent = out._id[:-len(out.name)-1] + struct_._set_id(parent) + yield struct_ + + def filter(self, *filters): + # Make a copy of the sequence. + out = self.__deepcopy__() + + # And filter it according to the selection expression. + for filter_ in filters: + out._set_filters(_quote(filter_)) + return out + + +class GridType(object): + """Grid constructor. + + A grid is a constructor holding an 'array' variable. The array has its + dimensions mapped to 'maps' stored in the grid (lat, lon, time, etc.). + Most of the requests are simply passed onto the stored array. 
+ """ + + def __init__(self, name='', array=None, maps=None, attributes=None): + self.name = quote(name) + self.array = array + self.maps = maps or odict() + self.attributes = attributes or {} + + self._id = name + self._filters = [] + + def __len__(self): + return self.array.shape[0] + + def __iter__(self): + # Iterate over the grid. Yield the array and then the maps. + yield self.array + for map_ in self.maps.values(): yield map_ + + walk = __iter__ + + def __getattr__(self, attr): + # Try to return attribute from self.attributes. + try: + return self.attributes[attr] + except KeyError: + raise AttributeError + + def __getitem__(self, index): + # Return data from the array. + return self.array[index] + + def _get_data(self): + return self.array.data + + def _set_data(self, data): + self.array.data = data + + data = property(_get_data, _set_data) + + def _get_id(self): + return self._id + + def _set_id(self, parent=None): + if parent: self._id = '%s.%s' % (parent, self.name) + else: self._id = self.name + + # Propagate id to array and maps. + if self.array: self.array._set_id(self._id) + for map_ in self.maps.values(): + map_._set_id(self._id) + + id = property(_get_id) + + def _get_filters(self): + return self._filters + + def _set_filters(self, f): + self.filters.append(f) + + # Propagate filter. + self.array._set_filters(f) + for map_ in self.maps.values(): + map_._set_filters(f) + + filters = property(_get_filters, _set_filters) + + def _get_dimensions(self): + # Return dimensions from stored maps. 
+ return tuple(self.maps.keys()) + + dimensions = property(_get_dimensions) + + def _get_shape(self): + return self.array.shape + + def _set_shape(self, shape): + self.array.shape = shape + + shape = property(_get_shape, _set_shape) + + def _get_type(self): + return self.array.type + + def _set_type(self, type): + self.array.type = type + + type = property(_get_type, _set_type) + + def __copy__(self): + out = self.__class__(name=self.name, array=self.array, maps=self.maps, attributes=self.attributes.copy()) + out._id = self._id + out._filters = self._filters[:] + return out + + def __deepcopy__(self, memo=None, _nil=[]): + out = self.__class__(name=self.name, attributes=self.attributes.copy()) + out.array = copy.deepcopy(self.array) + out.maps = copy.deepcopy(self.maps) + out._id = self._id + out._filters = self._filters[:] + return out + + +class BaseType(object): + """DAP Base type. + + Variable holding a single value, or an iterable if it's stored inside + a sequence. It's the fundamental DAP variable, which actually holds + data (together with arrays). + """ + + def __init__(self, name='', data=None, type=None, attributes=None): + self.name = quote(name) + self.data = data + self.attributes = attributes or {} + + if type in _basetypes: self.type = type + else: self.type = typemap.get(type, Int32) + + self._id = name + self._filters = [] + + def __iter__(self): + # Yield the stored value. + # Perhaps we should raise StopIteration? + yield self.data + + def __getattr__(self, attr): + # Try to return attribute from self.attributes. + try: + return self.attributes[attr] + except KeyError: + raise AttributeError + + def __getitem__(self, key): + # Return data from the array. + return self.data[key] + + def __setitem__(self, key, item): + # Assign a new variable and apply the proper id. 
+ self.data.__setitem__(key, item) + + def _get_id(self): + return self._id + + def _set_id(self, parent=None): + if parent: self._id = '%s.%s' % (parent, self.name) + else: self._id = self.name + + id = property(_get_id) # Read-only. + + def _get_filters(self): + return self._filters + + def _set_filters(self, f): + self._filters.append(f) + + # Propagate to data, if it's a Proxy object. + if hasattr(self.data, 'filters'): + self.data.filters = self._filters + + filters = property(_get_filters, _set_filters) + + def __copy__(self): + out = self.__class__(name=self.name, data=self.data, type=self.type, attributes=self.attributes.copy()) + out._id = self._id + out._filters = self._filters[:] + return out + + def __deepcopy__(self, memo=None, _nil=[]): + out = self.__class__(name=self.name, type=self.type, attributes=self.attributes.copy()) + + try: + out.data = copy.copy(self.data) + except TypeError: + self.data = to_list(self.data) + out.data = copy.copy(self.data) + + out._id = self._id + out._filters = self._filters[:] + return out + + # This allows the variable to be compared to numbers. + def __ge__(self, other): return self.data >= other + def __gt__(self, other): return self.data > other + def __le__(self, other): return self.data <= other + def __lt__(self, other): return self.data < other + def __eq__(self, other): return self.data == other + + +class ArrayType(BaseType): + """An array of BaseType variables. + + Although the DAP supports arrays of any DAP variables, pydap can only + handle arrays of base types. This makes the ArrayType class very + similar to a BaseType, with the difference that it'll hold an array + of data in its 'data' attribute. + + Array of constructors will not be supported until Python has a + native multi-dimensional array type. 
+ """ + + def __init__(self, name='', data=None, shape=None, dimensions=None, type=None, attributes=None): + self.name = quote(name) + self.data = data + self.shape = shape or () + self.dimensions = dimensions or () + self.attributes = attributes or {} + + if type in _basetypes: self.type = type + else: self.type = typemap.get(type, Int32) + + self._id = name + self._filters = [] + + def __len__(self): + return self.shape[0] + + def __copy__(self): + out = self.__class__(name=self.name, data=self.data, shape=self.shape, dimensions=self.dimensions, type=self.type, attributes=self.attributes.copy()) + out._id = self._id + out._filters = self._filters[:] + return out + + def __deepcopy__(self, memo=None, _nil=[]): + out = self.__class__(name=self.name, shape=self.shape, dimensions=self.dimensions, type=self.type, attributes=self.attributes.copy()) + + try: + out.data = copy.copy(self.data) + except TypeError: + self.data = to_list(self.data) + out.data = copy.copy(self.data) + + out._id = self._id + out._filters = self._filters[:] + return out + + +# Functions for propagating data up and down in sequences. +# I'm not 100% sure how this works. +def _build_data(level, *vars_): + if level > 0: + out = [_build_data(level-1, *els) for els in itertools.izip(*vars_)] + else: + out = vars_ + + return out + +def _propagate_data(level, vars_): + if level > 0: + out = zip(*[_propagate_data(level-1, els) for els in vars_]) + else: + out = vars_ + + return out Added: trunk/toolkits/basemap/lib/dap/exceptions.py =================================================================== --- trunk/toolkits/basemap/lib/dap/exceptions.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/exceptions.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,49 @@ +"""DAP exceptions. + +These exceptions are mostly used by the server. When an exception is +captured, a proper error message is displayed (according to the DAP +2.0 spec), with information about the exception and the error code +associated with it. 
+ +The error codes are attributed using the "first come, first serve" +algorithm. +""" + +__author__ = "Roberto De Almeida <ro...@py...>" + + +class DapError(Exception): + """Base DAP exception.""" + def __init__(self, value): + self.value = value + + def __str__(self): + return repr(self.value) + + +class ClientError(DapError): + """Generic error with the client.""" + code = 100 + + +class ServerError(DapError): + """Generic error with the server.""" + code = 200 + +class ConstraintExpressionError(ServerError): + """Exception raised when an invalid constraint expression is given.""" + code = 201 + + +class PluginError(DapError): + """Generic error with a plugin.""" + code = 300 + +class ExtensionNotSupportedError(PluginError): + """Exception raised when trying to open a file not supported by any plugins.""" + code = 301 + +class OpenFileError(PluginError): + """Exception raised when unable to open a file.""" + code = 302 + Added: trunk/toolkits/basemap/lib/dap/helper.py =================================================================== --- trunk/toolkits/basemap/lib/dap/helper.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/helper.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,803 @@ +"""Helper functions. + +These are generic functions used mostly for writing plugins. +""" + +__author__ = "Roberto De Almeida <ro...@py...>" + +import sys +import re +import operator +import itertools +import copy +from urllib import quote, unquote + +from dap.dtypes import * +from dap.dtypes import _basetypes +from dap.exceptions import ConstraintExpressionError +from dap.lib import isiterable +from dap.util.safeeval import expr_eval +from dap.util.ordereddict import odict + + +def constrain(dataset, constraints): + """A simple example. 
We create a dataset holding three variables: + + >>> dataset = DatasetType(name='foo') + >>> dataset['a'] = BaseType(name='a', type='Byte') + >>> dataset['b'] = BaseType(name='b', type='Byte') + >>> dataset['c'] = BaseType(name='c', type='Byte') + + Now we give it a CE requesting only the variables ``a`` and ``b``: + + >>> dataset2 = constrain(dataset, 'a,b') + >>> print dataset2 #doctest: +ELLIPSIS + {'a': <dap.dtypes.BaseType object at ...>, 'b': <dap.dtypes.BaseType object at ...>} + + We can also request the variables in a different order: + + >>> dataset2 = constrain(dataset, 'b,a') + >>> print dataset2 #doctest: +ELLIPSIS + {'b': <dap.dtypes.BaseType object at ...>, 'a': <dap.dtypes.BaseType object at ...>} + + Another example. A dataset with two structures ``a`` and ``b``: + + >>> dataset = DatasetType(name='foo') + >>> dataset['a'] = StructureType(name='a') + >>> dataset['a']['a1'] = BaseType(name='a1', type='Byte') + >>> dataset['b'] = StructureType(name='b') + >>> dataset['b']['b1'] = BaseType(name='b1', type='Byte') + >>> dataset['b']['b2'] = BaseType(name='b2', type='Byte') + + If we request the structure ``b`` we should get it complete: + + >>> dataset2 = constrain(dataset, 'a.a1,b') + >>> print dataset2 #doctest: +ELLIPSIS + {'a': {'a1': <dap.dtypes.BaseType object at ...>}, 'b': {'b1': <dap.dtypes.BaseType object at ...>, 'b2': <dap.dtypes.BaseType object at ...>}} + + >>> dataset2 = constrain(dataset, 'b.b1') + >>> print dataset2 #doctest: +ELLIPSIS + {'b': {'b1': <dap.dtypes.BaseType object at ...>}} + + Arrays can be sliced. 
Here we have a ``(2,3)`` array: + + >>> dataset = DatasetType(name='foo') + >>> from numpy import array + >>> data = array([1,2,3,4,5,6]) + >>> data.shape = (2,3) + >>> dataset['array'] = ArrayType(data=data, name='array', shape=(2,3), type='Int32') + >>> dataset2 = constrain(dataset, 'array') + >>> from dap.server import SimpleHandler + >>> headers, output = SimpleHandler(dataset).dds() + >>> print ''.join(output) + Dataset { + Int32 array[2][3]; + } foo; + <BLANKLINE> + >>> print dataset2['array'].data + [[1 2 3] + [4 5 6]] + + But we request only part of it: + + >>> dataset2 = constrain(dataset, 'array[0:1:1][0:1:1]') + >>> headers, output = SimpleHandler(dataset2).dds() + >>> print ''.join(output) + Dataset { + Int32 array[2][2]; + } foo; + <BLANKLINE> + >>> print dataset2['array'].data + [[1 2] + [4 5]] + + The same is valid for grids: + + >>> dataset['grid'] = GridType(name='grid') + >>> data = array([1,2,3,4,5,6]) + >>> data.shape = (2,3) + >>> dataset['grid'].array = ArrayType(name='grid', data=data, shape=(2,3), dimensions=('x', 'y')) + >>> dataset['grid'].maps['x'] = ArrayType(name='x', data=array([1,2]), shape=(2,)) + >>> dataset['grid'].maps['y'] = ArrayType(name='y', data=array([1,2,3]), shape=(3,)) + >>> dataset._set_id() + >>> headers, output = SimpleHandler(dataset).dds() + >>> print ''.join(output) + Dataset { + Int32 array[2][3]; + Grid { + Array: + Int32 grid[x = 2][y = 3]; + Maps: + Int32 x[x = 2]; + Int32 y[y = 3]; + } grid; + } foo; + <BLANKLINE> + >>> dataset2 = constrain(dataset, 'grid[0:1:0][0:1:0]') + >>> headers, output = SimpleHandler(dataset2).dds() + >>> print ''.join(output) + Dataset { + Grid { + Array: + Int32 grid[x = 1][y = 1]; + Maps: + Int32 x[x = 1]; + Int32 y[y = 1]; + } grid; + } foo; + <BLANKLINE> + >>> headers, output = SimpleHandler(dataset2).ascii() + >>> print ''.join(output) + Dataset { + Grid { + Array: + Int32 grid[x = 1][y = 1]; + Maps: + Int32 x[x = 1]; + Int32 y[y = 1]; + } grid; + } foo; + 
--------------------------------------------- + grid.grid + [0][0] 1 + <BLANKLINE> + grid.x + [0] 1 + <BLANKLINE> + grid.y + [0] 1 + <BLANKLINE> + <BLANKLINE> + <BLANKLINE> + <BLANKLINE> + + Selecting a map from a Grid should return a structure: + + >>> dataset3 = constrain(dataset, 'grid.x') + >>> headers, output = SimpleHandler(dataset3).dds() + >>> print ''.join(output) + Dataset { + Structure { + Int32 x[x = 2]; + } grid; + } foo; + <BLANKLINE> + + Short notation also works: + + >>> dataset3 = constrain(dataset, 'x') + >>> headers, output = SimpleHandler(dataset3).dds() + >>> print ''.join(output) + Dataset { + Structure { + Int32 x[x = 2]; + } grid; + } foo; + <BLANKLINE> + + It also works with Sequences: + + >>> dataset = DatasetType(name='foo') + >>> dataset['seq'] = SequenceType(name='seq') + >>> dataset['seq']['a'] = BaseType(name='a') + >>> dataset['seq']['b'] = BaseType(name='b') + >>> dataset['seq']['a'].data = range(5) + >>> dataset['seq']['b'].data = range(5,10) + >>> for i in dataset['seq'].data: + ... print i + (0, 5) + (1, 6) + (2, 7) + (3, 8) + (4, 9) + >>> dataset2 = constrain(dataset, 'seq.a') + >>> for i in dataset2['seq'].data: + ... print i + (0,) + (1,) + (2,) + (3,) + (4,) + >>> dataset2 = constrain(dataset, 'seq.b') + >>> for i in dataset2['seq'].data: + ... print i + (5,) + (6,) + (7,) + (8,) + (9,) + >>> dataset2 = constrain(dataset, 'seq.b,seq.a') + >>> for i in dataset2['seq'].data: + ... print i + (5, 0) + (6, 1) + (7, 2) + (8, 3) + (9, 4) + + The function also parses selection expressions. 
Let's create a + dataset with sequential data: + + >>> dataset = DatasetType(name='foo') + >>> dataset['seq'] = SequenceType(name='seq') + >>> dataset['seq']['index'] = BaseType(name='index', type='Int32') + >>> dataset['seq']['index'].data = [10, 11, 12, 13] + >>> dataset['seq']['temperature'] = BaseType(name='temperature', type='Float32') + >>> dataset['seq']['temperature'].data = [17.2, 15.1, 15.3, 15.1] + >>> dataset['seq']['site'] = BaseType(name='site', type='String') + >>> dataset['seq']['site'].data = ['Diamond_St', 'Blacktail_Loop', 'Platinum_St', 'Kodiak_Trail'] + + Here's the data: + + >>> for i in dataset['seq'].data: + ... print i + (10, 17.199999999999999, 'Diamond_St') + (11, 15.1, 'Blacktail_Loop') + (12, 15.300000000000001, 'Platinum_St') + (13, 15.1, 'Kodiak_Trail') + + Now suppose we only want data where ``index`` is greater than 11: + + >>> dataset2 = constrain(dataset, 'seq&seq.index>11') + >>> for i in dataset2['seq'].data: + ... print i + (12, 15.300000000000001, 'Platinum_St') + (13, 15.1, 'Kodiak_Trail') + + We can request only a few variables: + + >>> dataset2 = constrain(dataset, 'seq.site&seq.index>11') + >>> for i in dataset2['seq'].data: + ... print i + ('Platinum_St',) + ('Kodiak_Trail',) + + A few more tests: + + >>> dataset = DatasetType(name='foo') + >>> dataset['a'] = StructureType(name='a') + >>> dataset['a']['shn'] = BaseType(name='shn') + >>> dataset['b'] = StructureType(name='b') + >>> dataset['b']['shn'] = BaseType(name='shn') + >>> dataset2 = constrain(dataset, 'a.shn') + >>> print dataset2 #doctest: +ELLIPSIS + {'a': {'shn': <dap.dtypes.BaseType object at ...>}} + >>> dataset3 = constrain(dataset, 'shn') + Traceback (most recent call last): + ... + ConstraintExpressionError: 'Ambiguous shorthand notation request: shn' + >>> dataset['shn'] = BaseType(name='shn') + >>> dataset3 = constrain(dataset, 'shn') + >>> print dataset3 #doctest: +ELLIPSIS + {'shn': <dap.dtypes.BaseType object at 0x1746290>} + """ + # Parse constraints. 
+ fields, queries = parse_querystring(constraints) + + # Ids and names are used to check that requests made using the + # shorthand notation are not ambiguous. Used names are stored to + # make sure that at most only a single variables is returned from + # a given name. + ids = [var.id for var in walk(dataset)] + names = [] + + new = DatasetType(name=dataset.name, attributes=dataset.attributes.copy()) + new = build(dataset, new, fields, queries, ids, names) + + return new + + +def build(dapvar, new, fields, queries, ids, names): + vars_ = fields.keys() + order = [] + for var in dapvar.walk(): + # Make a copy of the variable, so that later we can possibly add it + # to the dataset we're building (that's why it's a candidate). + candidate = copy.deepcopy(var) + + # We first filter the data in sequences. This has to be done + # before variables are removed, since we can select values based + # on conditions on *other* variables. Eg: seq.a where seq.b > 1 + if queries and isinstance(candidate, SequenceType): + # Filter candidate on the server-side, since the data may be + # proxied using ``dap.proxy.Proxy``. + candidate = candidate.filter(*queries) + # And then filter on the client side. + candidate = filter_(candidate, queries) + + # If the variable was requested, either by id or name, or if no + # variables were requested, we simply add this candidate to the + # dataset we're building. + if not vars_ or candidate.id in vars_ or (candidate.name in vars_ and candidate.name not in ids): + new[candidate.name] = candidate + + # Check if requests done using shn are not ambiguous. + if vars_ and candidate.id not in vars_: # request by shn + if candidate.name in names: + raise ConstraintExpressionError("Ambiguous shorthand notation request: %s" % candidate.name) + names.append(candidate.name) + + # We also need to store the order in which the variables were + # requested. Later, we'll rearrange the variables in our built + # dataset in the correct order. 
+ if vars_: + if candidate.id in vars_: index = vars_.index(candidate.id) + else: index = vars_.index(candidate.name) + order.append((index, candidate.name)) + + # If the variable was not requested, but it's a constructor, it's + # possible that one of its children has been requested. We apply + # the algorithm recursively on the variable. + elif not isinstance(var, BaseType): + # We clear the candidate after storing a copy with the filtered + # data and children. We will then append the requested children + # to the cleared candidate. + ccopy = copy.deepcopy(candidate) + if isinstance(candidate, StructureType): + candidate.clear() + else: + # If the variable is a grid we should return it as a + # structure with the requested fields. + parent = candidate._id[:-len(candidate.name)-1] + candidate = StructureType(name=candidate.name, attributes=candidate.attributes.copy()) + candidate._set_id(parent) + + # Check for requested children. + candidate = build(ccopy, candidate, fields, queries, ids, names) + + # If the candidate has any keys, ie, stored variables, we add + # it to the dataset we are building. + if candidate.keys(): new[candidate.name] = candidate + + # Check if we need to apply a slice in the variable. + slice_ = fields.get(candidate.id) or fields.get(candidate.name) + if slice_: candidate = slicevar(candidate, slice_) + + # Sort variables according to order of requested variables. + if len(order) > 1: + order.sort() + new._keys = [item[1] for item in order] + + return new + + +def filter_(dapvar, queries): + # Get only the queries related to this variable. + queries_ = [q for q in queries if q.startswith(dapvar.id)] + if queries_: + # Build the filter and apply it to the data. + ids = [var.id for var in dapvar.values()] + f = buildfilter(queries_, ids) + data = itertools.ifilter(f, dapvar.data) + + # Set the data in the stored variables. 
+ data = list(data) + dapvar.data = data + + return dapvar + + +def slicevar(dapvar, slice_): + if slice_ != (slice(None),): + dapvar.data = dapvar.data[slice_] + + try: + dapvar.shape = getattr(dapvar.data, 'shape', (len(dapvar.data),)) + except TypeError: + pass + + if isinstance(dapvar, GridType): + if not isiterable(slice_): slice_ = (slice_,) + + # Slice the maps. + for map_,mapslice in zip(dapvar.maps.values(), slice_): + map_.data = map_.data[mapslice] + map_.shape = map_.data.shape + + return dapvar + + +def order(dataset, fields): + """ + Order a given dataset according to the requested order. + + >>> d = DatasetType(name='d') + >>> d['a'] = BaseType(name='a') + >>> d['b'] = BaseType(name='b') + >>> d['c'] = SequenceType(name='c') + >>> d['c']['d'] = BaseType(name='d') + >>> d['c']['e'] = BaseType(name='e') + >>> print order(d, 'b,c.e,c.d,a'.split(',')) #doctest: +ELLIPSIS + {'b': <dap.dtypes.BaseType object at ...>, 'c': {'e': <dap.dtypes.BaseType object at ...>, 'd': <dap.dtypes.BaseType object at ...>}, 'a': <dap.dtypes.BaseType object at ...>} + >>> print order(d, 'c.e,c.d,a'.split(',')) #doctest: +ELLIPSIS + {'c': {'e': <dap.dtypes.BaseType object at ...>, 'd': <dap.dtypes.BaseType object at ...>}, 'a': <dap.dtypes.BaseType object at ...>, 'b': <dap.dtypes.BaseType object at ...>} + >>> print order(d, 'b,c,a'.split(',')) #doctest: +ELLIPSIS + {'b': <dap.dtypes.BaseType object at ...>, 'c': {'d': <dap.dtypes.BaseType object at ...>, 'e': <dap.dtypes.BaseType object at ...>}, 'a': <dap.dtypes.BaseType object at ...>} + """ + # Order the dataset. + dataset = copy.copy(dataset) + orders = [] + n = len(dataset._keys) + for var in dataset.walk(): + # Search for id first. + fields_ = [field[:len(var.id)] for field in fields] + if var.id in fields_: index = fields_.index(var.id) + # Else search by name. + elif var.name in fields: index = fields.index(var.name) + # Else preserve original order. 
+ else: index = n + dataset._keys.index(var.name) + orders.append((index, var.name)) + + # Sort children. + if isinstance(var, StructureType): + dataset[var.name] = order(var, fields) + + # Sort dataset. + if len(orders) > 1: + orders.sort() + dataset._keys = [item[1] for item in orders] + + return dataset + + +def walk(dapvar): + """ + Iterate over all variables, including dapvar. + """ + yield dapvar + try: + for child in dapvar.walk(): + for var in walk(child): yield var + except: + pass + + +def getslice(hyperslab, shape=None): + """Parse a hyperslab. + + Parse a hyperslab to a slice according to variable shape. The hyperslab + follows the DAP specification, and ommited dimensions are returned in + their entirety. + + >>> getslice('[0:1:2][0:1:2]') + (slice(0, 3, 1), slice(0, 3, 1)) + >>> getslice('[0:2][0:2]') + (slice(0, 3, 1), slice(0, 3, 1)) + >>> getslice('[0][2]') + (slice(0, 1, 1), slice(2, 3, 1)) + >>> getslice('[0:1:1]') + (slice(0, 2, 1),) + >>> getslice('[0:2:1]') + (slice(0, 2, 2),) + >>> getslice('') + (slice(None, None, None),) + """ + # Backwards compatibility. In pydap <= 2.2.3 the ``fields`` dict from + # helper.parse_querystring returned the slices as strings (instead of + # Python slices). These strings had to be passed to getslice to get a + # Python slice. Old plugins still do this, but with pydap >= 2.2.4 + # they are already passing the slices, so we simply return them. + if not isinstance(hyperslab, basestring): return hyperslab or slice(None) + + if hyperslab: + output = [] + dimslices = hyperslab[1:-1].split('][') + for dimslice in dimslices: + start, size, step = _getsize(dimslice) + output.append(slice(start, start+size, step)) + output = tuple(output) + else: + output = (slice(None),) + + return output + + +def _getsize(dimslice): + """Parse a dimension from a hyperslab. + + Calculates the start, size and step from a DAP formatted hyperslab. 
+ + >>> _getsize('0:1:9') + (0, 10, 1) + >>> _getsize('0:2:9') + (0, 10, 2) + >>> _getsize('0') + (0, 1, 1) + >>> _getsize('0:9') + (0, 10, 1) + """ + size = dimslice.split(':') + + start = int(size[0]) + if len(size) == 1: + stop = start + step = 1 + elif len(size) == 2: + stop = int(size[1]) + step = 1 + elif len(size) == 3: + step = int(size[1]) + stop = int(size[2]) + else: + raise ConstraintExpressionError('Invalid hyperslab: %s.' % dimslice) + size = (stop-start) + 1 + + return start, size, step + + +def buildfilter(queries, vars_): + """This function is a filter builder. + + Given a list of DAP formatted queries and a list of variable names, + this function returns a dynamic filter function to filter rows. + + From the example in the DAP specification: + + >>> vars_ = ['index', 'temperature', 'site'] + >>> data = [] + >>> data.append([10, 17.2, 'Diamond_St']) + >>> data.append([11, 15.1, 'Blacktail_Loop']) + >>> data.append([12, 15.3, 'Platinum_St']) + >>> data.append([13, 15.1, 'Kodiak_Trail']) + + Rows where index is greater-than-or-equal 11: + + >>> f = buildfilter(['index>=11'], vars_) + >>> for line in itertools.ifilter(f, data): + ... print line + [11, 15.1, 'Blacktail_Loop'] + [12, 15.300000000000001, 'Platinum_St'] + [13, 15.1, 'Kodiak_Trail'] + + Rows where site ends with '_St': + + >>> f = buildfilter(['site=~".*_St"'], vars_) + >>> for line in itertools.ifilter(f, data): + ... print line + [10, 17.199999999999999, 'Diamond_St'] + [12, 15.300000000000001, 'Platinum_St'] + + Index greater-or-equal-than 11 AND site ends with '_St': + + >>> f = buildfilter(['site=~".*_St"', 'index>=11'], vars_) + >>> for line in itertools.ifilter(f, data): + ... print line + [12, 15.300000000000001, 'Platinum_St'] + + Site is either 'Diamond_St' OR 'Blacktail_Loop': + + >>> f = buildfilter(['site={"Diamond_St", "Blacktail_Loop"}'], vars_) + >>> for line in itertools.ifilter(f, data): + ... 
print line + [10, 17.199999999999999, 'Diamond_St'] + [11, 15.1, 'Blacktail_Loop'] + + Index is either 10 OR 12: + + >>> f = buildfilter(['index={10, 12}'], vars_) + >>> for line in itertools.ifilter(f, data): + ... print line + [10, 17.199999999999999, 'Diamond_St'] + [12, 15.300000000000001, 'Platinum_St'] + + Python is great, isn't it? :) + """ + filters = [] + p = re.compile(r'''^ # Start of selection + {? # Optional { for multi-valued constants + (?P<var1>.*?) # Anything + }? # Closing } + (?P<op><=|>=|!=|=~|>|<|=) # Operators + {? # { + (?P<var2>.*?) # Anything + }? # } + $ # EOL + ''', re.VERBOSE) + for query in queries: + m = p.match(query) + if not m: raise ConstraintExpressionError('Invalid constraint expression: %s.' % query) + + # Functions associated with each operator. + op = {'<' : operator.lt, + '>' : operator.gt, + '!=': operator.ne, + '=' : operator.eq, + '>=': operator.ge, + '<=': operator.le, + '=~': lambda a,b: re.match(b,a), + }[m.group('op')] + # Allow multiple comparisons in one line. Python rulez! + op = multicomp(op) + + # Build the filter for the first variable. + if m.group('var1') in vars_: + i = vars_.index(m.group('var1')) + var1 = lambda L, i=i: operator.getitem(L, i) + + # Build the filter for the second variable. It could be either + # a name or a constant. + if m.group('var2') in vars_: + i = vars_.index(m.group('var2')) + var2 = lambda L, i=i: operator.getitem(L, i) + else: + var2 = lambda x, m=m: expr_eval(m.group('var2')) + + # This is the filter. We apply the function (op) to the variable + # filters (var1 and var2). + filter0 = lambda x, op=op, var1=var1, var2=var2: op(var1(x), var2(x)) + filters.append(filter0) + + if filters: + # We have to join all the filters that were built, using the AND + # operator. Believe me, this line does exactly that. + # + # You are not expected to understand this. 
+ filter0 = lambda i: reduce(lambda x,y: x and y, [f(i) for f in filters]) + else: + filter0 = bool + + return filter0 + + +def multicomp(function): + """Multiple OR comparisons. + + Given f(a,b), this function returns a new function g(a,b) which + performs multiple OR comparisons if b is a tuple. + + >>> a = 1 + >>> b = (0, 1, 2) + >>> operator.lt = multicomp(operator.lt) + >>> operator.lt(a, b) + True + """ + def f(a, b): + if isinstance(b, tuple): + for i in b: + # Return True if any comparison is True. + if function(a, i): return True + return False + else: + return function(a, b) + + return f + + +def fix_slice(dims, index): + """Fix incomplete slices or slices with ellipsis. + + The behaviour of this function was reversed-engineered from numpy. + + >>> fix_slice(3, (0, Ellipsis, 0)) + (0, slice(None, None, None), 0) + >>> fix_slice(4, (0, Ellipsis, 0)) + (0, slice(None, None, None), slice(None, None, None), 0) + >>> fix_slice(4, (0, 0, Ellipsis, 0)) + (0, 0, slice(None, None, None), 0) + >>> fix_slice(5, (0, Ellipsis, 0)) + (0, slice(None, None, None), slice(None, None, None), slice(None, None, None), 0) + >>> fix_slice(5, (0, 0, Ellipsis, 0)) + (0, 0, slice(None, None, None), slice(None, None, None), 0) + >>> fix_slice(5, (0, Ellipsis, 0, Ellipsis)) + (0, slice(None, None, None), slice(None, None, None), 0, slice(None, None, None)) + >>> fix_slice(4, slice(None, None, None)) + (slice(None, None, None), slice(None, None, None), slice(None, None, None), slice(None, None, None)) + >>> fix_slice(4, (slice(None, None, None), 0)) + (slice(None, None, None), 0, slice(None, None, None), slice(None, None, None)) + """ + if not isinstance(index, tuple): index = (index,) + + out = [] + length = len(index) + for slice_ in index: + if slice_ is Ellipsis: + out.extend([slice(None)] * (dims - length + 1)) + length += (dims - length) + else: + out.append(slice_) + index = tuple(out) + + if len(index) < dims: + index += (slice(None),) * (dims - len(index)) + + return index + 
+ +def lenslice(slice_): + """ + Return the number of values associated with a slice. + + By Bob Drach. + """ + step = slice_.step + if step is None: step = 1 + + if step > 0: + start = slice_.start + stop = slice_.stop + else: + start = slice_.stop + stop = slice_.start + step = -step + return ((stop-start-1)/step + 1) + + +def parse_querystring(query): + """ + Parse a query_string returning the requested variables, dimensions, and CEs. + + >>> parse_querystring('a,b') + ({'a': (slice(None, None, None),), 'b': (slice(None, None, None),)}, []) + >>> parse_querystring('a[0],b[1]') + ({'a': (slice(0, 1, 1),), 'b': (slice(1, 2, 1),)}, []) + >>> parse_querystring('a[0],b[1]&foo.bar>1') + ({'a': (slice(0, 1, 1),), 'b': (slice(1, 2, 1),)}, ['foo.bar>1']) + >>> parse_querystring('a[0],b[1]&foo.bar>1&LAYERS=SST') + ({'a': (slice(0, 1, 1),), 'b': (slice(1, 2, 1),)}, ['foo.bar>1', 'LAYERS=SST']) + >>> parse_querystring('foo.bar>1&LAYERS=SST') + ({}, ['foo.bar>1', 'LAYERS=SST']) + + """ + if query is None: return {}, [] + + query = unquote(query) + constraints = query.split('&') + + # Check if the first item is either a list of variables (projection) + # or a selection. + relops = ['<', '<=', '>', '>=', '=', '!=',' =~'] + for relop in relops: + if relop in constraints[0]: + vars_ = [] + queries = constraints[:] + break + else: + vars_ = constraints[0].split(',') + queries = constraints[1:] + + fields = odict() + p = re.compile(r'(?P<name>[^[]+)(?P<shape>(\[[^\]]+\])*)') + for var in vars_: + if var: + # Check if the var has a slice. + c = p.match(var).groupdict() + id_ = quote(c['name']) + fields[id_] = getslice(c['shape']) + + return fields, queries + + +def escape_dods(dods, pad=''): + """ + Escape a DODS response. + + This is useful for debugging. You're probably spending too much time + with pydap if you need to use this. 
+ """ + if 'Data:\n' in dods: + index = dods.index('Data:\n') + len('Data:\n') + else: + index = 0 + + dds = dods[:index] + dods = dods[index:] + + out = [] + for i, char in enumerate(dods): + char = hex(ord(char)) + char = char.replace('0x', '\\x') + if len(char) < 4: char = char.replace('\\x', '\\x0') + out.append(char) + if pad and (i%4 == 3): out.append(pad) + out = ''.join(out) + out = out.replace(r'\x5a\x00\x00\x00', '<start of sequence>') + out = out.replace(r'\xa5\x00\x00\x00', '<end of sequence>\n') + return dds + out + + +def _test(): + import doctest + doctest.testmod() + +if __name__ == "__main__": + _test() Added: trunk/toolkits/basemap/lib/dap/lib.py =================================================================== --- trunk/toolkits/basemap/lib/dap/lib.py (rev 0) +++ trunk/toolkits/basemap/lib/dap/lib.py 2007-11-26 18:29:14 UTC (rev 4449) @@ -0,0 +1,122 @@ +from __future__ import division + +"""Basic functions concerning the DAP. + +These functions are mostly related to encoding data according to the DAP. +""" + +from urllib import quote as _quote + +__author__ = 'Roberto De Almeida <ro...@py...>' +__version__ = (2,2,6,1) # module version +__dap__ = (2,0) # protocol version + +# Constants that used to live in __init__.py but had to be moved +# because we share the namespace with plugins and responses. +USER_AGENT = 'pydap/%s' % '.'.join([str(_) for _ in __version__]) +INDENT = ' ' * 4 +VERBOSE = False +CACHE = None +TIMEOUT = None + + +def isiterable(o): + """Tests if an object is iterable. + + >>> print isiterable(range(10)) + True + >>> print isiterable({}) + True + >>> def a(): + ... for i in range(10): yield i + >>> print isiterable(a()) + True + >>> print isiterable('string') + False + >>> print isiterable(1) + False + """ + # We DON'T want to iterate over strings. 
+ if isinstance(o, basestring): return False + + try: + iter(o) + return True + except TypeError: + return False + + +def to_list(L): + if hasattr(L, 'tolist'): return L.tolist() # shortcut for numpy arrays + elif isiterable(L): return [to_list(item) for item in L] + else: return L + + +def quote(name): + """Extended quote for the DAP spec. + + The period MUST be escaped in names (DAP spec, item 5.1): + + >>> quote("White space") + 'White%20space' + >>> _quote("Period.") + 'Period.' + >>> quote("Period.") + 'Period%2E' + """ + return _quote(name).replace('.', '%2E') + + +def encode_atom(atom): + r"""Atomic types encoding. + + Encoding atomic types for the DAS. Integers should be printed using the + base 10 ASCII representation of its value: + ... [truncated message content] |
From: <js...@us...> - 2007-11-26 18:58:55
|
Revision: 4451 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4451&view=rev Author: jswhit Date: 2007-11-26 10:58:41 -0800 (Mon, 26 Nov 2007) Log Message: ----------- add httplib2, since dap requires it. Modified Paths: -------------- trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/setup.py Added Paths: ----------- trunk/toolkits/basemap/lib/httplib2/ trunk/toolkits/basemap/lib/httplib2/__init__.py trunk/toolkits/basemap/lib/httplib2/iri2uri.py Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2007-11-26 18:48:01 UTC (rev 4450) +++ trunk/toolkits/basemap/MANIFEST.in 2007-11-26 18:58:41 UTC (rev 4451) @@ -75,6 +75,7 @@ include MANIFEST.in recursive-include geos-2.2.3 * recursive-include lib/dap * +recursive-include lib/httplib2 * recursive-include lib/dbflib * recursive-include lib/shapelib * include lib/matplotlib/toolkits/basemap/data/5minmask.bin Added: trunk/toolkits/basemap/lib/httplib2/__init__.py =================================================================== --- trunk/toolkits/basemap/lib/httplib2/__init__.py (rev 0) +++ trunk/toolkits/basemap/lib/httplib2/__init__.py 2007-11-26 18:58:41 UTC (rev 4451) @@ -0,0 +1,1123 @@ +from __future__ import generators +""" +httplib2 + +A caching http interface that supports ETags and gzip +to conserve bandwidth. + +Requires Python 2.3 or later + +Changelog: +2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. 
+ +""" + +__author__ = "Joe Gregorio (jo...@bi...)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = ["Thomas Broyer (t.b...@lt...)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] +__license__ = "MIT" +__version__ = "$Rev: 259 $" + +import re +import sys +import md5 +import email +import email.Utils +import email.Message +import StringIO +import gzip +import zlib +import httplib +import urlparse +import base64 +import os +import copy +import calendar +import time +import random +import sha +import hmac +from gettext import gettext as _ +import socket + +try: + import socks +except ImportError: + socks = None + +if sys.version_info >= (2,3): + from iri2uri import iri2uri +else: + def iri2uri(uri): + return uri + +__all__ = ['Http', 'Response', 'ProxyInfo', 'HttpLib2Error', + 'RedirectMissingLocation', 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel'] + + +# The httplib debug level, set to a non-zero value to get debug output +debuglevel = 0 + +# Python 2.3 support +if sys.version_info < (2,4): + def sorted(seq): + seq.sort() + return seq + +# Python 2.3 support +def HTTPResponse__getheaders(self): + """Return list of (header, value) tuples.""" + if self.msg is None: + raise httplib.ResponseNotReady() + return self.msg.items() + +if not hasattr(httplib.HTTPResponse, 'getheaders'): + httplib.HTTPResponse.getheaders = HTTPResponse__getheaders + +# All exceptions raised here derive from HttpLib2Error +class HttpLib2Error(Exception): pass + +# Some exceptions can be caught and optionally +# be turned back into responses. 
+class HttpLib2ErrorWithResponse(HttpLib2Error): + def __init__(self, desc, response, content): + self.response = response + self.content = content + HttpLib2Error.__init__(self, desc) + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass +class RedirectLimit(HttpLib2ErrorWithResponse): pass +class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass + +class RelativeURIError(HttpLib2Error): pass +class ServerNotFoundError(HttpLib2Error): pass + +# Open Items: +# ----------- +# Proxy support + +# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) + +# Pluggable cache storage (supports storing the cache in +# flat files by default. We need a plug-in architecture +# that can support Berkeley DB and Squid) + +# == Known Issues == +# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. +# Does not handle Cache-Control: max-stale +# Does not use Age: headers when calculating cache freshness. + + +# The number of redirections to follow before giving up. +# Note that only GET redirects are automatically followed. +# Will also honor 301 requests by saving that info and never +# requesting that URI again. +DEFAULT_MAX_REDIRECTS = 5 + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) + return [header for header in response.keys() if header not in hopbyhop] + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. 
+ + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. 
+ """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = md5.new(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip() for x in part.split("=")]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. +USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x08\x0A-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"? 
+WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?<!\")[^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") +WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(?<!\")[^ \t\r\n,]+(?!\"))\"?)(.*)$") +UNQUOTE_PAIRS = re.compile(r'\\(.)') +def _parse_www_authenticate(headers, headername='www-authenticate'): + """Returns a dictionary of dictionaries, one dict + per auth_scheme.""" + retval = {} + if headers.has_key(headername): + authenticate = headers[headername].strip() + www_auth = USE_WWW_AUTH_STRICT_PARSING and WWW_AUTH_STRICT or WWW_AUTH_RELAXED + while authenticate: + # Break off the scheme at the beginning of the line + if headername == 'authentication-info': + (auth_scheme, the_rest) = ('digest', authenticate) + else: + (auth_scheme, the_rest) = authenticate.split(" ", 1) + # Now loop over all the key value pairs that come after the scheme, + # being careful not to roll into the next scheme + match = www_auth.search(the_rest) + auth_params = {} + while match: + if match and len(match.groups()) == 3: + (key, value, the_rest) = match.groups() + auth_params[key.lower()] = UNQUOTE_PAIRS.sub(r'\1', value) # '\\'.join([x.replace('\\', '') for x in value.split('\\\\')]) + match = www_auth.search(the_rest) + retval[auth_scheme.lower()] = auth_params + authenticate = the_rest.strip() + return retval + + +def _entry_disposition(response_headers, request_headers): + """Determine freshness from the Date, Expires and Cache-Control headers. + + We don't handle the following: + + 1. Cache-Control: max-stale + 2. Age: headers are not used in the calculations. + + Not that this algorithm is simpler than you might think + because we are operating as a private (non-shared) cache. + This lets us ignore 's-maxage'. We can also ignore + 'proxy-invalidate' since we aren't a proxy. 
+ We will never return a stale document as + fresh as a design decision, and thus the non-implementation + of 'max-stale'. This also lets us safely ignore 'must-revalidate' + since we operate as if every server has sent 'must-revalidate'. + Since we are private we get to ignore both 'public' and + 'private' parameters. We also ignore 'no-transform' since + we don't do any transformations. + The 'no-store' parameter is handled at a higher level. + So the only Cache-Control parameters we look at are: + + no-cache + only-if-cached + max-age + min-fresh + """ + + retval = "STALE" + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + + if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1: + retval = "TRANSPARENT" + if 'cache-control' not in request_headers: + request_headers['cache-control'] = 'no-cache' + elif cc.has_key('no-cache'): + retval = "TRANSPARENT" + elif cc_response.has_key('no-cache'): + retval = "STALE" + elif cc.has_key('only-if-cached'): + retval = "FRESH" + elif response_headers.has_key('date'): + date = calendar.timegm(email.Utils.parsedate_tz(response_headers['date'])) + now = time.time() + current_age = max(0, now - date) + if cc_response.has_key('max-age'): + try: + freshness_lifetime = int(cc_response['max-age']) + except ValueError: + freshness_lifetime = 0 + elif response_headers.has_key('expires'): + expires = email.Utils.parsedate_tz(response_headers['expires']) + if None == expires: + freshness_lifetime = 0 + else: + freshness_lifetime = max(0, calendar.timegm(expires) - date) + else: + freshness_lifetime = 0 + if cc.has_key('max-age'): + try: + freshness_lifetime = int(cc['max-age']) + except ValueError: + freshness_lifetime = 0 + if cc.has_key('min-fresh'): + try: + min_fresh = int(cc['min-fresh']) + except ValueError: + min_fresh = 0 + current_age += min_fresh + if freshness_lifetime > current_age: + retval = "FRESH" + return retval + +def 
_decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % response_headers.status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(?<!\r)\n", "\r\n", header_str) + text = "".join([status_header, header_str, content]) + + cache.set(cachekey, text) + +def _cnonce(): + dig = md5.new("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest() + return dig[:16] + +def _wsse_username_token(cnonce, iso_now, password): + return base64.encodestring(sha.new("%s%s%s" % (cnonce, iso_now, password)).digest()).strip() + + +# For credentials we need two things, first +# a pool of credential to try (not necesarily tied to BAsic, Digest, etc.) 
+# Then we also need a list of URIs that have already demanded authentication +# That list is tricky since sub-URIs can take the same auth, or the +# auth scheme may change as you descend the tree. +# So we also need each Auth instance to be able to tell us +# how close to the 'top' it is. + +class Authentication(object): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + self.path = path + self.host = host + self.credentials = credentials + self.http = http + + def depth(self, request_uri): + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return request_uri[len(self.path):].count("/") + + def inscope(self, host, request_uri): + # XXX Should we normalize the request_uri? + (scheme, authority, path, query, fragment) = parse_uri(request_uri) + return (host == self.host) and path.startswith(self.path) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header. Over-rise this in sub-classes.""" + pass + + def response(self, response, content): + """Gives us a chance to update with new nonces + or such returned from the last authorized response. + Over-rise this in sub-classes if necessary. + + Return TRUE is the request is to be retried, for + example Digest may return stale=true. 
+ """ + return False + + + +class BasicAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'Basic ' + base64.encodestring("%s:%s" % self.credentials).strip() + + +class DigestAuthentication(Authentication): + """Only do qop='auth' and MD5, since that + is all Apache currently implements""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 'www-authenticate') + self.challenge = challenge['digest'] + qop = self.challenge.get('qop') + self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None + if self.challenge['qop'] is None: + raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop)) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5') + if self.challenge['algorithm'] != 'MD5': + raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." 
% self.challenge['algorithm'])) + self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]]) + self.challenge['nc'] = 1 + + def request(self, method, request_uri, headers, content, cnonce = None): + """Modify the request headers""" + H = lambda x: md5.new(x).hexdigest() + KD = lambda s, d: H("%s:%s" % (s, d)) + A2 = "".join([method, ":", request_uri]) + self.challenge['cnonce'] = cnonce or _cnonce() + request_digest = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'], + '%08x' % self.challenge['nc'], + self.challenge['cnonce'], + self.challenge['qop'], H(A2) + )) + headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['nonce'], + request_uri, + self.challenge['algorithm'], + request_digest, + self.challenge['qop'], + self.challenge['nc'], + self.challenge['cnonce'], + ) + self.challenge['nc'] += 1 + + def response(self, response, content): + if not response.has_key('authentication-info'): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('digest', {}) + if 'true' == challenge.get('stale'): + self.challenge['nonce'] = challenge['nonce'] + self.challenge['nc'] = 1 + return True + else: + updated_challenge = _parse_www_authenticate(response, 'authentication-info').get('digest', {}) + + if updated_challenge.has_key('nextnonce'): + self.challenge['nonce'] = updated_challenge['nextnonce'] + self.challenge['nc'] = 1 + return False + + +class HmacDigestAuthentication(Authentication): + """Adapted from Robert Sayre's code and DigestAuthentication above.""" + __author__ = "Thomas Broyer (t.b...@lt...)" + + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + challenge = _parse_www_authenticate(response, 
'www-authenticate') + self.challenge = challenge['hmacdigest'] + # TODO: self.challenge['domain'] + self.challenge['reason'] = self.challenge.get('reason', 'unauthorized') + if self.challenge['reason'] not in ['unauthorized', 'integrity']: + self.challenge['reason'] = 'unauthorized' + self.challenge['salt'] = self.challenge.get('salt', '') + if not self.challenge.get('snonce'): + raise UnimplementedHmacDigestAuthOptionError( _("The challenge doesn't contain a server nonce, or this one is empty.")) + self.challenge['algorithm'] = self.challenge.get('algorithm', 'HMAC-SHA-1') + if self.challenge['algorithm'] not in ['HMAC-SHA-1', 'HMAC-MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm'])) + self.challenge['pw-algorithm'] = self.challenge.get('pw-algorithm', 'SHA-1') + if self.challenge['pw-algorithm'] not in ['SHA-1', 'MD5']: + raise UnimplementedHmacDigestAuthOptionError( _("Unsupported value for pw-algorithm: %s." % self.challenge['pw-algorithm'])) + if self.challenge['algorithm'] == 'HMAC-MD5': + self.hashmod = md5 + else: + self.hashmod = sha + if self.challenge['pw-algorithm'] == 'MD5': + self.pwhashmod = md5 + else: + self.pwhashmod = sha + self.key = "".join([self.credentials[0], ":", + self.pwhashmod.new("".join([self.credentials[1], self.challenge['salt']])).hexdigest().lower(), + ":", self.challenge['realm'] + ]) + self.key = self.pwhashmod.new(self.key).hexdigest().lower() + + def request(self, method, request_uri, headers, content): + """Modify the request headers""" + keys = _get_end2end_headers(headers) + keylist = "".join(["%s " % k for k in keys]) + headers_val = "".join([headers[k] for k in keys]) + created = time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime()) + cnonce = _cnonce() + request_digest = "%s:%s:%s:%s:%s" % (method, request_uri, cnonce, self.challenge['snonce'], headers_val) + request_digest = hmac.new(self.key, request_digest, self.hashmod).hexdigest().lower() + 
headers['Authorization'] = 'HMACDigest username="%s", realm="%s", snonce="%s", cnonce="%s", uri="%s", created="%s", response="%s", headers="%s"' % ( + self.credentials[0], + self.challenge['realm'], + self.challenge['snonce'], + cnonce, + request_uri, + created, + request_digest, + keylist, + ) + + def response(self, response, content): + challenge = _parse_www_authenticate(response, 'www-authenticate').get('hmacdigest', {}) + if challenge.get('reason') in ['integrity', 'stale']: + return True + return False + + +class WsseAuthentication(Authentication): + """This is thinly tested and should not be relied upon. + At this time there isn't any third party server to test against. + Blogger and TypePad implemented this algorithm at one point + but Blogger has since switched to Basic over HTTPS and + TypePad has implemented it wrong, by never issuing a 401 + challenge but instead requiring your client to telepathically know that + their endpoint is expecting WSSE profile="UsernameToken".""" + def __init__(self, credentials, host, request_uri, headers, response, content, http): + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['Authorization'] = 'WSSE profile="UsernameToken"' + iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()) + cnonce = _cnonce() + password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1]) + headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % ( + self.credentials[0], + password_digest, + cnonce, + iso_now) + +class GoogleLoginAuthentication(Authentication): + def __init__(self, credentials, host, request_uri, headers, response, content, http): + from urllib import urlencode + Authentication.__init__(self, credentials, host, request_uri, headers, response, content, http) + 
challenge = _parse_www_authenticate(response, 'www-authenticate') + service = challenge['googlelogin'].get('service', 'xapi') + # Bloggger actually returns the service in the challenge + # For the rest we guess based on the URI + if service == 'xapi' and request_uri.find("calendar") > 0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +def _md5(s): + return + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "r") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "w") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + def __init__(self, proxy_type, proxy_host, proxy_port, proxy_rdns=None, proxy_user=None, proxy_pass=None): + """The parameter proxy_type must be set to one of socks.PROXY_TYPE_XXX + constants. 
For example: + +p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host='localhost', proxy_port=8000) + """ + self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, self.proxy_user, self.proxy_pass = proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, self.proxy_rdns, + self.proxy_user, self.proxy_pass) + + def isgood(self): + return socks and (self.proxy_host != None) and (self.proxy_port != None) + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """HTTPConnection subclass that supports timeouts""" + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. + msg = "getaddrinfo returns an empty list" + for res in socket.getaddrinfo(self.host, self.port, 0, + socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if self.proxy_info and self.proxy_info.isgood(): + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(*self.proxy_info.astuple()) + else: + self.sock = socket.socket(af, socktype, proto) + # Different from httplib: support timeouts. + if self.timeout is not None: + self.sock.settimeout(self.timeout) + # End of difference from httplib. + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + self.sock.connect(sa) + except socket.error, msg: + if self.debuglevel > 0: + print 'connect fail:', (self.host, self.port) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + "This class allows communication via SSL." 
+ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None): + self.timeout = timeout + self.proxy_info = proxy_info + httplib.HTTPSConnection.__init__(self, host, port=port, key_file=key_file, + cert_file=cert_file, strict=strict) + + def connect(self): + "Connect to a host on a given (SSL) port." + + if self.proxy_info and self.proxy_info.isgood(): + self.sock.setproxy(*self.proxy_info.astuple()) + sock.setproxy(*self.proxy_info.astuple()) + else: + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + if self.timeout is not None: + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + ssl = socket.ssl(sock, self.key_file, self.cert_file) + self.sock = httplib.FakeSocket(sock, ssl) + + + +class Http(object): + """An HTTP client that handles: +- all methods +- caching +- ETags +- compression, +- HTTPS +- Basic +- Digest +- WSSE + +and more. + """ + def __init__(self, cache=None, timeout=None, proxy_info=None): + """The value of proxy_info is a ProxyInfo instance. + +If 'cache' is a string then it is used as a directory name +for a disk cache. Otherwise it must be an object that supports +the same interface as FileCache.""" + self.proxy_info = proxy_info + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. + if cache and isinstance(cache, str): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # If 'follow_redirects' is True, and this is set to True then + # all redirecs are followed, including unsafe ones. 
+ self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. + """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + for i in range(2): + try: + conn.request(method, request_uri, body, headers) + response = conn.getresponse() + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except httplib.HTTPException, e: + if i == 0: + conn.close() + conn.connect() + continue + else: + raise + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + + break; + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for 
auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + 
del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = ((response.status == 303) and (method not in ["GET", "HEAD"])) and "GET" or method + (response, content) = self.request(location, redirect_method, body=body, headers = headers, redirections = redirections - 1) + response.previous = old_response + else: + raise RedirectLimit( _("Redirected more times than rediection_limit allows."), response, content) + elif response.status in [200, 203] and method == "GET": + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. +The 'uri' is the URI of the HTTP resource and can begin +with either 'http' or 'https'. The value of 'uri' must be an absolute URI. + +The 'method' is the HTTP method to perform, such as GET, POST, DELETE, etc. +There is no restriction on the methods allowed. + +The 'body' is the entity body to be sent with the request. It is a string +object. + +Any extra headers that are to be sent with the request should be provided in the +'headers' dictionary. + +The maximum number of redirect to follow before raising an +exception is 'redirections. The default is 5. 
+ +The return value is a tuple of (response, content), the first +being and instance of the 'Response' class, the second being +a string that contains the response entity body. + """ + try: + if headers is None: + headers = {} + else: + headers = _normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = (scheme == 'https') and HTTPSConnectionWithTimeout or HTTPConnectionWithTimeout + certs = list(self.certificates.iter(authority)) + if scheme == 'https' and certs: + conn = self.connections[conn_key] = connection_type(authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, proxy_info=self.proxy_info) + else: + conn = self.connections[conn_key] = connection_type(authority, timeout=self.timeout, proxy_info=self.proxy_info) + conn.set_debuglevel(debuglevel) + + if method in ["GET", "HEAD"] and 'range' not in headers: + headers['accept-encoding'] = 'compress, gzip' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri + cached_value = self.cache.get(cachekey) + if cached_value: + info = email.message_from_string(cached_value) + try: + content = cached_value.split('\r\n\r\n', 1)[1] + except IndexError: + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in ["PUT"] and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not 
in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response( { + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response( { + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. 
+ if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key] = value + self.status = int(self.get('status', self.status)) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name Added: trunk/toolkits/basemap/lib/httplib2/iri2uri.py =================================================================== --- trunk/toolkits/basemap/lib/httplib2/iri2uri.py (rev 0) +++ trunk/toolkits/basemap/lib/httplib2/iri2uri.py 2007-11-26 18:58:41 UTC (rev 4451) @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. + +""" +__author__ = "Joe Gregorio (jo...@bi...)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF ), + (0xE000, 0xF8FF ), + (0xF900, 0xFDCF ), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD ), + (0x20000, 0x2FFFD ), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD ), + (0x50000, 0x5FFFD ), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD ), + (0x80000, 0x8FFFD ), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD ), + (0xB0000, 0xBFFFD ), + (0xC0000, 0xCFFFD), + 
(0xD0000, 0xDFFFD ), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD ), + (0x100000, 0x10FFFD) +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:Joh...@ex...", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK 
LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-11-26 18:48:01 UTC (rev 4450) +++ trunk/toolkits/basemap/setup.py 2007-11-26 18:58:41 UTC (rev 4451) @@ -101,12 +101,17 @@ include_dirs = ["pyshapelib/shapelib"], define_macros = dbf_macros()) ] -# install dap, if not already available. -try: - from dap import client -except ImportError: - packages = packages + ['dap','dap.util','dap.parsers'] - package_dirs['dap'] = os.path.join('lib','dap') +# install dap and httplib2, if not already available. +#try: +# from dap import client +#except ImportError: +packages = packages + ['dap','dap.util','dap.parsers'] +package_dirs['dap'] = os.path.join('lib','dap') +#try: +# import httplib2 +#except ImportError: +packages = packages + ['httplib2'] +package_dirs['httlib2'] = os.path.join('lib','httplib2') if 'setuptools' in sys.modules: # Are we running with setuptools? This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-26 19:09:49
|
Revision: 4454 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4454&view=rev Author: jswhit Date: 2007-11-26 11:08:46 -0800 (Mon, 26 Nov 2007) Log Message: ----------- bump version number Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-26 19:03:56 UTC (rev 4453) +++ trunk/toolkits/basemap/Changelog 2007-11-26 19:08:46 UTC (rev 4454) @@ -1,5 +1,6 @@ +version 0.9.8 (not yet released) * modify NetCDFFile to use dap module to read remote - datasets over http. Include dap module. + datasets over http. Include dap and httplib2 modules. * modify NetCDFFile to automatically apply scale_factor and add_offset, and return masked arrays masked where data == missing_value or _FillValue. Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-26 19:03:56 UTC (rev 4453) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-26 19:08:46 UTC (rev 4454) @@ -19,7 +19,7 @@ # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) -__version__ = '0.9.7' +__version__ = '0.9.8' # supported map projections. 
_projnames = {'cyl' : 'Cylindrical Equidistant', Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-11-26 19:03:56 UTC (rev 4453) +++ trunk/toolkits/basemap/setup.py 2007-11-26 19:08:46 UTC (rev 4454) @@ -134,7 +134,7 @@ package_data = {'matplotlib.toolkits.basemap':pyproj_datafiles+basemap_datafiles} setup( name = "basemap", - version = "0.9.7", + version = "0.9.8", description = "Plot data on map projections with matplotlib", long_description = """ An add-on toolkit for matplotlib that lets you plot data This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-27 13:12:15
|
Revision: 4463 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4463&view=rev Author: jswhit Date: 2007-11-27 05:12:10 -0800 (Tue, 27 Nov 2007) Log Message: ----------- remove linestyle kwarg from drawparallels and drawmeridians. Modified Paths: -------------- trunk/toolkits/basemap/API_CHANGES trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py Modified: trunk/toolkits/basemap/API_CHANGES =================================================================== --- trunk/toolkits/basemap/API_CHANGES 2007-11-27 08:07:07 UTC (rev 4462) +++ trunk/toolkits/basemap/API_CHANGES 2007-11-27 13:12:10 UTC (rev 4463) @@ -1,5 +1,11 @@ -version 0.9.2: width, height keywords added to Basemap.__init__. -version 0.9.1: createfigure method removed. +version 0.9.8: remove linestyle kwarg from drawparallels and drawmeridians. + add fill_color kwarg to drawmapboundary. +version 0.9.7: added lake_color kwarg to fillcontinents. +version 0.9.6: added "labelstyle" keyword to drawparallels and drawmeridians. + added "zorder" kwarg to drawparallels, drawmeridians, + drawcoastlines, drawstates, drawcountries, drawrivers, +version 0.9.2: width, height keywords added to Basemap.__init__. +version 0.9.1: createfigure method removed. version 0.9: creatfigure method deprecated, new set_aspect axes method used to ensure maps have right aspect ratio. pcolormesh, drawlsmask methods added. Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-27 08:07:07 UTC (rev 4462) +++ trunk/toolkits/basemap/Changelog 2007-11-27 13:12:10 UTC (rev 4463) @@ -1,4 +1,7 @@ version 0.9.8 (not yet released) + * remove 'linestyle' kwarg from drawparallels and + drawmeridians (it never did anything anyway since + it was overridden by the 'dashes' kwarg). * modify NetCDFFile to use dap module to read remote datasets over http. Include dap and httplib2 modules. 
* modify NetCDFFile to automatically apply scale_factor Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-27 08:07:07 UTC (rev 4462) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-27 13:12:10 UTC (rev 4463) @@ -1356,7 +1356,7 @@ return info def drawparallels(self,circles,color='k',linewidth=1.,zorder=None, \ - linestyle='--',dashes=[1,1],labels=[0,0,0,0],labelstyle=None, \ + dashes=[1,1],labels=[0,0,0,0],labelstyle=None, \ fmt='%g',xoffset=None,yoffset=None,ax=None,**kwargs): """ draw parallels (latitude lines). @@ -1366,7 +1366,6 @@ linewidth - line width for parallels (default 1.) zorder - sets the zorder for parallels (if not specified, uses default zorder for Line2D class). - linestyle - line style for parallels (default '--', i.e. dashed). dashes - dash pattern for parallels (default [1,1], i.e. 1 pixel on, 1 pixel off). labels - list of 4 values (default [0,0,0,0]) that control whether @@ -1461,7 +1460,7 @@ for x,y in zip(xl,yl): # skip if only a point. if len(x) > 1 and len(y) > 1: - l = Line2D(x,y,linewidth=linewidth,linestyle=linestyle) + l = Line2D(x,y,linewidth=linewidth) l.set_color(color) l.set_dashes(dashes) if zorder is not None: @@ -1576,7 +1575,7 @@ self.set_axes_limits(ax=ax) def drawmeridians(self,meridians,color='k',linewidth=1., zorder=None,\ - linestyle='--',dashes=[1,1],labels=[0,0,0,0],labelstyle=None,\ + dashes=[1,1],labels=[0,0,0,0],labelstyle=None,\ fmt='%g',xoffset=None,yoffset=None,ax=None,**kwargs): """ draw meridians (longitude lines). @@ -1586,7 +1585,6 @@ linewidth - line width for meridians (default 1.) zorder - sets the zorder for meridians (if not specified, uses default zorder for Line2D class). - linestyle - line style for meridians (default '--', i.e. dashed). dashes - dash pattern for meridians (default [1,1], i.e. 
1 pixel on, 1 pixel off). labels - list of 4 values (default [0,0,0,0]) that control whether @@ -1667,7 +1665,7 @@ for x,y in zip(xl,yl): # skip if only a point. if len(x) > 1 and len(y) > 1: - l = Line2D(x,y,linewidth=linewidth,linestyle=linestyle) + l = Line2D(x,y,linewidth=linewidth) l.set_color(color) l.set_dashes(dashes) if zorder is not None: This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-27 16:21:14
|
Revision: 4464 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4464&view=rev Author: jswhit Date: 2007-11-27 08:21:11 -0800 (Tue, 27 Nov 2007) Log Message: ----------- fmt kwarg to drawparallels and drawmeridians can now be a string formatting function. Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/examples/customticks.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-11-27 13:12:10 UTC (rev 4463) +++ trunk/toolkits/basemap/Changelog 2007-11-27 16:21:11 UTC (rev 4464) @@ -1,4 +1,7 @@ version 0.9.8 (not yet released) + * 'fmt' kwarg to drawparallels and drawmeridians can + now be a custom string formatting function (example + customticks.py demonstrates usage). * remove 'linestyle' kwarg from drawparallels and drawmeridians (it never did anything anyway since it was overridden by the 'dashes' kwarg). Modified: trunk/toolkits/basemap/examples/customticks.py =================================================================== --- trunk/toolkits/basemap/examples/customticks.py 2007-11-27 13:12:10 UTC (rev 4463) +++ trunk/toolkits/basemap/examples/customticks.py 2007-11-27 16:21:11 UTC (rev 4464) @@ -5,19 +5,32 @@ # example showing how to create custom tick labels for a cylindrical # projection. 
-def deg2str(deg, dir='E', fmt="%3.1f"): +def lat2str(deg): + fmt = "%d" min = 60 * (deg - numpy.floor(deg)) deg = numpy.floor(deg) + dir = 'N' if deg < 0: if min != 0.0: deg += 1.0 min -= 60.0 - if dir=='E': - dir='W' - if dir=='N': - dir='S' - return (u"%d\N{DEGREE SIGN}" + fmt + "' %s") % (numpy.abs(deg), numpy.abs(min), dir) + dir='S' + return (u"%d\N{DEGREE SIGN} %g' %s") % (numpy.abs(deg), numpy.abs(min), dir) +def lon2str(deg): + min = 60 * (deg - numpy.floor(deg)) + deg = numpy.floor(deg) + dir = 'E' + if deg < 0: + if min != 0.0: + deg += 1.0 + min -= 60.0 + dir='W' + return (u"%d\N{DEGREE SIGN} %g' %s") % (numpy.abs(deg), numpy.abs(min), dir) + +# (1) use matplotlib custom tick formatter +# instead of Basemap labelling methods. + # create figure. fig=pylab.figure() # create Basemap instance (regular lat/lon projection). @@ -35,14 +48,34 @@ ax = pylab.gca() # add custom ticks. # This only works for projection='cyl'. -def xformat(x, pos=None): return deg2str(x, 'E', fmt="%2.0f") +def xformat(x, pos=None): return lon2str(x) xformatter = FuncFormatter(xformat) ax.xaxis.set_major_formatter(xformatter) -def yformat(y, pos=None): return deg2str(y, 'N', fmt="%2.0f") +def yformat(y, pos=None): return lat2str(y) yformatter = FuncFormatter(yformat) ax.yaxis.set_major_formatter(yformatter) -ax.fmt_xdata = lambda x: deg2str(x, 'E', fmt="%5.3f") -ax.fmt_ydata = lambda y: deg2str(y, 'N', fmt="%5.3f") +ax.fmt_xdata = lambda x: lon2str(x) +ax.fmt_ydata = lambda y: lat2str(y) ax.grid() ax.set_title('Hawaii') + +# (2) use Basemap labelling methods, but pass a +# custom formatting function with the 'fmt' keyword. + +# create figure. +fig = pylab.figure() +# create Basemap instance. +m = Basemap(llcrnrlon=-156.5,llcrnrlat=18.75,urcrnrlon=-154.5,urcrnrlat=20.5, + resolution='h',projection='cyl') +# draw coastlines, fill land and lake areas. +m.drawcoastlines() +m.fillcontinents(color='coral',lake_color='aqua') +# background color will be used for oceans. 
+m.drawmapboundary(fill_color='aqua') +# label meridians and parallels, passing string formatting function +# with 'fmt' keyword. +m.drawparallels(numpy.linspace(18,21,7),labels=[1,0,0,0],fmt=lat2str) +m.drawmeridians(numpy.linspace(-157,-154,7),labels=[0,0,0,1],fmt=lon2str) +pylab.title('Hawaii') + pylab.show() Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-27 13:12:10 UTC (rev 4463) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-11-27 16:21:11 UTC (rev 4464) @@ -1375,7 +1375,9 @@ but not the right and top. labelstyle - if set to "+/-", north and south latitudes are labelled with "+" and "-", otherwise they are labelled with "N" and "S". - fmt is a format string to format the parallel labels (default '%g'). + fmt can be is a format string to format the parallel labels + (default '%g') or a function that takes a latitude value + in degrees as it's only argument and returns a formatted string. xoffset - label offset from edge of map in x-direction (default is 0.01 times width of map in map projection coordinates). yoffset - label offset from edge of map in y-direction @@ -1517,36 +1519,40 @@ nl = _searchlist(lats,lat) nr = _searchlist(lats[::-1],lat) if nr != -1: nr = len(lons)-nr-1 - if lat<0: - if rcParams['text.usetex']: - if labelstyle=='+/-': - latlabstr = r'${\/-%s\/^{\circ}}$'%fmt + latlab = fmt(lat) + try: # fmt is a function that returns a formatted string + latlab = fmt(lat) + except: # fmt is a format string. 
+ if lat<0: + if rcParams['text.usetex']: + if labelstyle=='+/-': + latlabstr = r'${\/-%s\/^{\circ}}$'%fmt + else: + latlabstr = r'${%s\/^{\circ}\/S}$'%fmt else: - latlabstr = r'${%s\/^{\circ}\/S}$'%fmt - else: - if labelstyle=='+/-': - latlabstr = u'-%s\N{DEGREE SIGN}'%fmt + if labelstyle=='+/-': + latlabstr = u'-%s\N{DEGREE SIGN}'%fmt + else: + latlabstr = u'%s\N{DEGREE SIGN}S'%fmt + latlab = latlabstr%npy.fabs(lat) + elif lat>0: + if rcParams['text.usetex']: + if labelstyle=='+/-': + latlabstr = r'${\/+%s\/^{\circ}}$'%fmt + else: + latlabstr = r'${%s\/^{\circ}\/N}$'%fmt else: - latlabstr = u'%s\N{DEGREE SIGN}S'%fmt - latlab = latlabstr%npy.fabs(lat) - elif lat>0: - if rcParams['text.usetex']: - if labelstyle=='+/-': - latlabstr = r'${\/+%s\/^{\circ}}$'%fmt - else: - latlabstr = r'${%s\/^{\circ}\/N}$'%fmt + if labelstyle=='+/-': + latlabstr = u'+%s\N{DEGREE SIGN}'%fmt + else: + latlabstr = u'%s\N{DEGREE SIGN}N'%fmt + latlab = latlabstr%lat else: - if labelstyle=='+/-': - latlabstr = u'+%s\N{DEGREE SIGN}'%fmt + if rcParams['text.usetex']: + latlabstr = r'${%s\/^{\circ}}$'%fmt else: - latlabstr = u'%s\N{DEGREE SIGN}N'%fmt - latlab = latlabstr%lat - else: - if rcParams['text.usetex']: - latlabstr = r'${%s\/^{\circ}}$'%fmt - else: - latlabstr = u'%s\N{DEGREE SIGN}'%fmt - latlab = latlabstr%lat + latlabstr = u'%s\N{DEGREE SIGN}'%fmt + latlab = latlabstr%lat # parallels can intersect each map edge twice. for i,n in enumerate([nl,nr]): # don't bother if close to the first label. @@ -1594,7 +1600,9 @@ but not the right and top. labelstyle - if set to "+/-", east and west longitudes are labelled with "+" and "-", otherwise they are labelled with "E" and "W". - fmt is a format string to format the meridian labels (default '%g'). + fmt can be is a format string to format the meridian labels + (default '%g') or a function that takes a longitude value + in degrees as it's only argument and returns a formatted string. 
xoffset - label offset from edge of map in x-direction (default is 0.01 times width of map in map projection coordinates). yoffset - label offset from edge of map in y-direction @@ -1721,42 +1729,45 @@ lons = [(lon+360) % 360 for lon in lons] for lon in meridians: # adjust so 0 <= lon < 360 - lon = (lon+360) % 360 + lon2 = (lon+360) % 360 # find index of meridian (there may be two, so # search from left and right). - nl = _searchlist(lons,lon) - nr = _searchlist(lons[::-1],lon) + nl = _searchlist(lons,lon2) + nr = _searchlist(lons[::-1],lon2) if nr != -1: nr = len(lons)-nr-1 - if lon>180: - if rcParams['text.usetex']: - if labelstyle=='+/-': - lonlabstr = r'${\/-%s\/^{\circ}}$'%fmt + try: # fmt is a function that returns a formatted string + lonlab = fmt(lon) + except: # fmt is a format string. + if lon2>180: + if rcParams['text.usetex']: + if labelstyle=='+/-': + lonlabstr = r'${\/-%s\/^{\circ}}$'%fmt + else: + lonlabstr = r'${%s\/^{\circ}\/W}$'%fmt else: - lonlabstr = r'${%s\/^{\circ}\/W}$'%fmt - else: - if labelstyle=='+/-': - lonlabstr = u'-%s\N{DEGREE SIGN}'%fmt + if labelstyle=='+/-': + lonlabstr = u'-%s\N{DEGREE SIGN}'%fmt + else: + lonlabstr = u'%s\N{DEGREE SIGN}W'%fmt + lonlab = lonlabstr%npy.fabs(lon2-360) + elif lon2<180 and lon2 != 0: + if rcParams['text.usetex']: + if labelstyle=='+/-': + lonlabstr = r'${\/+%s\/^{\circ}}$'%fmt + else: + lonlabstr = r'${%s\/^{\circ}\/E}$'%fmt else: - lonlabstr = u'%s\N{DEGREE SIGN}W'%fmt - lonlab = lonlabstr%npy.fabs(lon-360) - elif lon<180 and lon != 0: - if rcParams['text.usetex']: - if labelstyle=='+/-': - lonlabstr = r'${\/+%s\/^{\circ}}$'%fmt - else: - lonlabstr = r'${%s\/^{\circ}\/E}$'%fmt + if labelstyle=='+/-': + lonlabstr = u'+%s\N{DEGREE SIGN}'%fmt + else: + lonlabstr = u'%s\N{DEGREE SIGN}E'%fmt + lonlab = lonlabstr%lon2 else: - if labelstyle=='+/-': - lonlabstr = u'+%s\N{DEGREE SIGN}'%fmt + if rcParams['text.usetex']: + lonlabstr = r'${%s\/^{\circ}}$'%fmt else: - lonlabstr = u'%s\N{DEGREE SIGN}E'%fmt - lonlab 
= lonlabstr%lon - else: - if rcParams['text.usetex']: - lonlabstr = r'${%s\/^{\circ}}$'%fmt - else: - lonlabstr = u'%s\N{DEGREE SIGN}'%fmt - lonlab = lonlabstr%lon + lonlabstr = u'%s\N{DEGREE SIGN}'%fmt + lonlab = lonlabstr%lon2 # meridians can intersect each map edge twice. for i,n in enumerate([nl,nr]): lat = lats[n]/100. This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-11-27 17:34:33
|
Revision: 4468 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4468&view=rev Author: jswhit Date: 2007-11-27 09:34:20 -0800 (Tue, 27 Nov 2007) Log Message: ----------- use PyNIO to read data if the pure python NetCDF reader fails (PyNIO can read GRIB1+2, HDF4, NetCDF4 and HDFEOS2). Modified Paths: -------------- trunk/toolkits/basemap/examples/plotprecip.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/pupynere.py Modified: trunk/toolkits/basemap/examples/plotprecip.py =================================================================== --- trunk/toolkits/basemap/examples/plotprecip.py 2007-11-27 17:07:38 UTC (rev 4467) +++ trunk/toolkits/basemap/examples/plotprecip.py 2007-11-27 17:34:20 UTC (rev 4468) @@ -13,7 +13,6 @@ # data from http://www.srh.noaa.gov/rfcshare/precip_analysis_new.php prcpvar = nc.variables['amountofprecip'] data = 0.01*prcpvar[:] -data = pylab.clip(data,0,10000) latcorners = nc.variables['lat'][:] loncorners = -nc.variables['lon'][:] plottitle = prcpvar.long_name+' for period ending '+prcpvar.dateofdata Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/pupynere.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/pupynere.py 2007-11-27 17:07:38 UTC (rev 4467) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/pupynere.py 2007-11-27 17:34:20 UTC (rev 4468) @@ -36,6 +36,11 @@ from dap.client import open as open_remote from dap.dtypes import ArrayType, GridType, typemap +has_pynio = True +try: + from PyNGL import nio +except ImportError: + has_pynio = False ABSENT = '\x00' * 8 ZERO = '\x00' * 4 @@ -54,14 +59,31 @@ def NetCDFFile(file): """NetCDF File reader. API is the same as Scientific.IO.NetCDF. If 'file' is a URL that starts with 'http', it is assumed - to be a remote OPenDAP dataest, and the python dap client is used + to be a remote OPenDAP dataset, and the python dap client is used to retrieve the data. 
Only the OPenDAP Array and Grid data types are recognized. If file does not start with 'http', it - is assumed to be a local NetCDF file.""" + is assumed to be a local file. If possible, the file will be read + with a pure python NetCDF reader, otherwise PyNIO + (http://www.pyngl.ucar.edu/Nio.shtml) will be used (if it is installed). + PyNIO supports NetCDF version 4, GRIB1, GRIB2, HDF4 and HDFEOS2 files. + """ if file.startswith('http'): return _RemoteFile(file) else: - return _LocalFile(file) + # use pynio if it is installed and the file cannot + # be read with the pure python netCDF reader. This allows + # netCDF version 4, GRIB1, GRIB2, HDF4 and HDFEOS files + # to be read. + if has_pynio: + try: + f = _LocalFile(file) + except: + f = nio.open_file(file) + # otherwise, use the pupynere netCDF 3 pure python reader. + # (will fail if file is not a netCDF version 3 file). + else: + f = _LocalFile(file) + return f def _maskandscale(var,datout): if hasattr(var, 'missing_value') and (datout == var.missing_value).any(): This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-03 22:32:40
|
Revision: 4570 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4570&view=rev Author: jswhit Date: 2007-12-03 14:32:28 -0800 (Mon, 03 Dec 2007) Log Message: ----------- add default for altitude of geostationary orbit in 'geos' projection. Modified Paths: -------------- trunk/toolkits/basemap/examples/geos_demo.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py Modified: trunk/toolkits/basemap/examples/geos_demo.py =================================================================== --- trunk/toolkits/basemap/examples/geos_demo.py 2007-12-03 19:16:17 UTC (rev 4569) +++ trunk/toolkits/basemap/examples/geos_demo.py 2007-12-03 22:32:28 UTC (rev 4570) @@ -3,12 +3,10 @@ # create Basemap instance for Geostationary (satellite view) projection. lon_0 = float(raw_input('enter reference longitude (lon_0):')) -#h = float(raw_input('enter satellite height above equator in meters (satellite_height):')) -h = 35785831.0 # map with land/sea mask plotted fig=figure() -m = Basemap(projection='geos',lon_0=lon_0,satellite_height=h,rsphere=(6378137.00,6356752.3142),resolution=None) +m = Basemap(projection='geos',lon_0=lon_0,rsphere=(6378137.00,6356752.3142),resolution=None) # plot land-sea mask. rgba_land = (0,255,0,255) # land green. rgba_ocean = (0,0,255,255) # ocean blue. @@ -18,11 +16,11 @@ m.drawparallels(arange(-90.,120.,30.)) m.drawmeridians(arange(0.,420.,60.)) m.drawmapboundary() -title('Geostationary Map Centered on Lon=%s, Satellite Height=%s' % (lon_0,h)) +title('Geostationary Map Centered on Lon=%s' % (lon_0)) # map with continents drawn and filled. 
fig = figure() -m = Basemap(projection='geos',lon_0=lon_0,satellite_height=h,rsphere=(6378137.00,6356752.3142),resolution='l') +m = Basemap(projection='geos',lon_0=lon_0,rsphere=(6378137.00,6356752.3142),resolution='l') m.drawcoastlines() m.drawmapboundary(fill_color='aqua') m.fillcontinents(color='coral',lake_color='aqua') @@ -31,5 +29,5 @@ m.drawparallels(arange(-90.,120.,30.)) m.drawmeridians(arange(0.,420.,60.)) m.drawmapboundary() -title('Geostationary Map Centered on Lon=%s, Satellite Height=%s' % (lon_0,h)) +title('Geostationary Map Centered on Lon=%s' % (lon_0)) show() Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-03 19:16:17 UTC (rev 4569) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-03 22:32:28 UTC (rev 4570) @@ -212,7 +212,7 @@ on the north or south pole. The longitude lon_0 is at 6-o'clock, and the latitude circle boundinglat is tangent to the edge of the map at lon_0. satellite_height - height of satellite (in m) above equator - - only relevant for geostationary projections ('geos'). + only relevant for geostationary projections ('geos'). Default 35,786 km. Here are the most commonly used class methods (see the docstring for each for more details): @@ -324,7 +324,7 @@ lat_0=None, lon_0=None, lon_1=None, lon_2=None, suppress_ticks=True, - satellite_height=None, + satellite_height=35786000, boundinglat=None, anchor='C', ax=None): @@ -362,7 +362,7 @@ _insert_validated(projparams, lon_0, 'lon_0', -360, 720) _insert_validated(projparams, lon_1, 'lon_1', -360, 720) _insert_validated(projparams, lon_2, 'lon_2', -360, 720) - if satellite_height is not None: + if projection == 'geos': projparams['h'] = satellite_height # check for sane values of projection corners. 
using_corners = (None not in [llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat]) @@ -488,8 +488,8 @@ if npy.abs(lat_0) < 1.e-2: lat_0 = 1.e-2 projparams['lat_0'] = lat_0 elif projection == 'geos': - if lon_0 is None and satellite_height is None: - raise ValueError, 'must specify lon_0 and satellite_height for Geostationary basemap' + if lon_0 is None: + raise ValueError, 'must specify lon_0 for Geostationary basemap' if width is not None or height is not None: print 'warning: width and height keywords ignored for %s projection' % self.projection if not using_corners: This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-04 17:36:58
|
Revision: 4579 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4579&view=rev Author: jswhit Date: 2007-12-04 09:36:55 -0800 (Tue, 04 Dec 2007) Log Message: ----------- add date2num and num2date functions to basemap namespace. Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-12-04 16:06:20 UTC (rev 4578) +++ trunk/toolkits/basemap/Changelog 2007-12-04 17:36:55 UTC (rev 4579) @@ -1,3 +1,5 @@ + * added num2date and date2num functions, which use + included netcdftime module. version 0.9.8 (svn revision 4526) * fixes for filling continents in orthographic projection. * added 'maskandscale' kwarg to NetCDFFile to Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2007-12-04 16:06:20 UTC (rev 4578) +++ trunk/toolkits/basemap/MANIFEST.in 2007-12-04 17:36:55 UTC (rev 4579) @@ -78,6 +78,7 @@ recursive-include lib/httplib2 * recursive-include lib/dbflib * recursive-include lib/shapelib * +recursive-include lib/netcdftime * include lib/matplotlib/toolkits/basemap/data/5minmask.bin include lib/matplotlib/toolkits/basemap/data/GL27 include lib/matplotlib/toolkits/basemap/data/countries_c.dat Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 16:06:20 UTC (rev 4578) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 17:36:55 UTC (rev 4579) @@ -28,7 +28,7 @@ from numpy import linspace, squeeze, ma from matplotlib.cbook import is_scalar, dedent from shapelib import ShapeFile 
-import _geos, pupynere +import _geos, pupynere, netcdftime # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) @@ -2857,3 +2857,80 @@ else: f = pupynere._LocalFile(file,maskandscale) return f + +def num2date(times,units,unit_format='%Y-%m-%d %H:%M:%S',calendar='standard'): + """ + Return datetime objects given numeric time values. The units + of the numeric time values are described by the units argument + and the unit_format and calendar keywords. + + Arguments: + + times - numeric time values. Maximum resolution is 1 second. + units - a string of the form '<time-units> since <reference time>' + describing the time units. <time-units> can be days, hours, minutes + or seconds. <reference-time> is the time origin, defined by the format + keyword (see below). For example, a valid choice would be + units='hours since 0001-01-01 00:00:00'. + + Keyword Arguments: + + format - a string describing a reference time. This string is converted + to a year,month,day,hour,minute,second tuple by strptime. The default + format is '%Y-%m-%d %H:%M:%S'. See the time.strptime docstring for other + valid formats. + + calendar - describes the calendar used in the time calculations. + All the values currently defined in the CF metadata convention + (http://cf-pcmdi.llnl.gov/documents/cf-conventions/) are supported. + Valid calendars 'standard', 'gregorian', 'proleptic_gregorian' + 'noleap', '365_day', '360_day', 'julian'. Default is 'standard'. + + Returns a datetime instance, or an array of datetime instances. + + The datetime instances returned are 'real' python datetime + objects if the date falls in the Gregorian calendar (i.e. + calendar='proleptic_gregorian', or calendar = 'standard' or 'gregorian' + and the date is after 1582-10-15). Otherwise, they are 'phony' datetime + objects which support some but not all the methods of 'real' python + datetime objects. 
This is because the python datetime module cannot + the weird dates in some calendars (such as '360_day' and 'all_leap' + which don't exist in any real world calendar. + """ + cdftime = netcdftime.utime(units,calendar=calendar,format=unit_format) + return cdftime.num2date(times) + +def date2num(dates,units,unit_format='%Y-%m-%d %H:%M:%S',calendar='standard'): + """ + Return numeric time values given datetime objects. The units + of the numeric time values are described by the units argument + and the unit_format and calendar keywords. + + Arguments: + + dates - A datetime object or a sequence of datetime objects. + units - a string of the form '<time-units> since <reference time>' + describing the time units. <time-units> can be days, hours, minutes + or seconds. <reference-time> is the time origin, defined by the format + keyword (see below). For example, a valid choice would be + units='hours since 0001-01-01 00:00:00'. + + Keyword Arguments: + + format - a string describing a reference time. This string is converted + to a year,month,day,hour,minute,second tuple by strptime. The default + format is '%Y-%m-%d %H:%M:%S'. See the time.strptime docstring for other + valid formats. + + calendar - describes the calendar used in the time calculations. + All the values currently defined in the CF metadata convention + (http://cf-pcmdi.llnl.gov/documents/cf-conventions/) are supported. + Valid calendars 'standard', 'gregorian', 'proleptic_gregorian' + 'noleap', '365_day', '360_day', 'julian'. Default is 'standard'. + + Returns a numeric time value, or an array of numeric time values. + + The maximum resolution of the numeric time values is 1 second. 
+ """ + cdftime = netcdftime.utime(units,calendar=calendar,format=unit_format) + return cdftime.date2num(dates) Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-12-04 16:06:20 UTC (rev 4578) +++ trunk/toolkits/basemap/setup.py 2007-12-04 17:36:55 UTC (rev 4579) @@ -114,6 +114,10 @@ packages = packages + ['httplib2'] package_dirs['httlib2'] = os.path.join('lib','httplib2') +# install netcdftime +packages = packages + ['netcdftime'] +package_dirs['httlib2'] = os.path.join('lib','netcdftime') + if 'setuptools' in sys.modules: # Are we running with setuptools? # if so, need to specify all the packages in heirarchy This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-04 19:18:08
|
Revision: 4584 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4584&view=rev Author: jswhit Date: 2007-12-04 11:18:03 -0800 (Tue, 04 Dec 2007) Log Message: ----------- move netcdftime.py Modified Paths: -------------- trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/examples/fcstmaps.py trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Added Paths: ----------- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/netcdftime.py Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2007-12-04 19:16:57 UTC (rev 4583) +++ trunk/toolkits/basemap/MANIFEST.in 2007-12-04 19:18:03 UTC (rev 4584) @@ -68,6 +68,7 @@ include lib/matplotlib/toolkits/basemap/pyproj.py include lib/matplotlib/toolkits/basemap/cm.py include lib/matplotlib/toolkits/basemap/pupynere.py +include lib/matplotlib/toolkits/basemap/netcdftime.py include pyshapelib/README pyshapelib/COPYING pyshapelib/ChangeLog pyshapelib/NEWS include pyshapelib/*.i pyshapelib/*.c pyshapelib/*.py pyshapelib/*.h include pyshapelib/*.shp pyshapelib/*.shx pyshapelib/*.dbf @@ -78,7 +79,6 @@ recursive-include lib/httplib2 * recursive-include lib/dbflib * recursive-include lib/shapelib * -recursive-include lib/netcdftime * include lib/matplotlib/toolkits/basemap/data/5minmask.bin include lib/matplotlib/toolkits/basemap/data/GL27 include lib/matplotlib/toolkits/basemap/data/countries_c.dat Modified: trunk/toolkits/basemap/examples/fcstmaps.py =================================================================== --- trunk/toolkits/basemap/examples/fcstmaps.py 2007-12-04 19:16:57 UTC (rev 4583) +++ trunk/toolkits/basemap/examples/fcstmaps.py 2007-12-04 19:18:03 UTC (rev 4584) @@ -36,9 +36,6 @@ longitudes = data.variables['lon'] fcsttimes = data.variables['time'] times = fcsttimes[0:6] # first 6 forecast times. 
-# change 0.0 to 00 at end of time units string -# (so strptime will understand it). -timeunits = fcsttimes.units[:-2]+'0' ntimes = len(times) # put forecast times in YYYYMMDDHH format. verifdates = [] @@ -46,7 +43,7 @@ for time in times: print time, times[0] fcsthrs.append(int((time-times[0])*24)) - fdate = num2date(time,'days since 0001-01-01 00:00:00') + fdate = num2date(time,fcsttimes.units) verifdates.append(fdate.strftime('%Y%m%d%H')) print fcsthrs print verifdates Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 19:16:57 UTC (rev 4583) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 19:18:03 UTC (rev 4584) @@ -2858,28 +2858,21 @@ f = pupynere._LocalFile(file,maskandscale) return f -def num2date(times,units,unit_format='%Y-%m-%d %H:%M:%S',calendar='standard'): +def num2date(times,units,calendar='standard'): """ Return datetime objects given numeric time values. The units of the numeric time values are described by the units argument - and the unit_format and calendar keywords. + and the calendar keyword. Arguments: times - numeric time values. Maximum resolution is 1 second. + units - a string of the form '<time-units> since <reference time>' describing the time units. <time-units> can be days, hours, minutes - or seconds. <reference-time> is the time origin, defined by the format - keyword (see below). For example, a valid choice would be - units='hours since 0001-01-01 00:00:00'. + or seconds. <reference-time> is the time origin. A valid choice + would be units='hours since 0001-01-01 00:00:00'. - Keyword Arguments: - - format - a string describing a reference time. This string is converted - to a year,month,day,hour,minute,second tuple by strptime. The default - format is '%Y-%m-%d %H:%M:%S'. See the time.strptime docstring for other - valid formats. 
- calendar - describes the calendar used in the time calculations. All the values currently defined in the CF metadata convention (http://cf-pcmdi.llnl.gov/documents/cf-conventions/) are supported. @@ -2897,31 +2890,24 @@ the weird dates in some calendars (such as '360_day' and 'all_leap' which don't exist in any real world calendar. """ - cdftime = netcdftime.utime(units,calendar=calendar,format=unit_format) + cdftime = netcdftime.utime(units,calendar=calendar) return cdftime.num2date(times) -def date2num(dates,units,unit_format='%Y-%m-%d %H:%M:%S',calendar='standard'): +def date2num(dates,units,calendar='standard'): """ Return numeric time values given datetime objects. The units of the numeric time values are described by the units argument - and the unit_format and calendar keywords. + and the calendar keyword. Arguments: dates - A datetime object or a sequence of datetime objects. + units - a string of the form '<time-units> since <reference time>' describing the time units. <time-units> can be days, hours, minutes - or seconds. <reference-time> is the time origin, defined by the format - keyword (see below). For example, a valid choice would be - units='hours since 0001-01-01 00:00:00'. + or seconds. <reference-time> is the time origin. A valid choice + would be units='hours since 0001-01-01 00:00:00'. - Keyword Arguments: - - format - a string describing a reference time. This string is converted - to a year,month,day,hour,minute,second tuple by strptime. The default - format is '%Y-%m-%d %H:%M:%S'. See the time.strptime docstring for other - valid formats. - calendar - describes the calendar used in the time calculations. All the values currently defined in the CF metadata convention (http://cf-pcmdi.llnl.gov/documents/cf-conventions/) are supported. @@ -2932,5 +2918,5 @@ The maximum resolution of the numeric time values is 1 second. 
""" - cdftime = netcdftime.utime(units,calendar=calendar,format=unit_format) + cdftime = netcdftime.utime(units,calendar=calendar) return cdftime.date2num(dates) Added: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/netcdftime.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/netcdftime.py (rev 0) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/netcdftime.py 2007-12-04 19:18:03 UTC (rev 4584) @@ -0,0 +1,859 @@ +""" +Performs conversions of netCDF time coordinate data to/from datetime objects. +""" +import math, numpy, re, time +from datetime import datetime as real_datetime + +_units = ['days','hours','minutes','seconds','day','hour','minute','second'] +_calendars = ['standard','gregorian','proleptic_gregorian','noleap','julian','all_leap','365_day','366_day','360_day'] + +__version__ = '0.6' + +class datetime: + """ +Phony datetime object which mimics the python datetime object, +but allows for dates that don't exist in the proleptic gregorian calendar. +Doesn't do timedelta operations, doesn't overload + and -. + +Has strftime, timetuple and __repr__ methods. The format +of the string produced by __repr__ is controlled by self.format +(default %Y-%m-%d %H:%M:%S). + +Instance variables are year,month,day,hour,minute,second,dayofwk,dayofyr +and format. 
+ """ + def __init__(self,year,month,day,hour=0,minute=0,second=0,dayofwk=-1,dayofyr=1): + """dayofyr set to 1 by default - otherwise time.strftime will complain""" + self.year=year + self.month=month + self.day=day + self.hour=hour + self.minute=minute + self.dayofwk=dayofwk + self.dayofyr=dayofyr + self.second=second + self.format='%Y-%m-%d %H:%M:%S' + def strftime(self,format=None): + if format is None: + format = self.format + return _strftime(self,format) + def timetuple(self): + return (self.year,self.month,self.day,self.hour,self.minute,self.second,self.dayofwk,self.dayofyr,-1) + def __repr__(self): + return self.strftime(self.format) + +def JulianDayFromDate(date,calendar='standard'): + + """ + +creates a Julian Day from a 'datetime-like' object. Returns the fractional +Julian Day (resolution 1 second). + +if calendar='standard' or 'gregorian' (default), Julian day follows Julian +Calendar on and before 1582-10-5, Gregorian calendar after 1582-10-15. + +if calendar='proleptic_gregorian', Julian Day follows gregorian calendar. + +if calendar='julian', Julian Day follows julian calendar. + +Algorithm: + +Meeus, Jean (1998) Astronomical Algorithms (2nd Edition). Willmann-Bell, +Virginia. p. 63 + + """ + + # based on redate.py by David Finlayson. 
+ + year=date.year; month=date.month; day=date.day + hour=date.hour; minute=date.minute; second=date.second + # Convert time to fractions of a day + day = day + hour/24.0 + minute/1440.0 + second/86400.0 + + # Start Meeus algorithm (variables are in his notation) + if (month < 3): + month = month + 12 + year = year - 1 + + A = int(year/100) + + jd = int(365.25 * (year + 4716)) + int(30.6001 * (month + 1)) + \ + day - 1524.5 + + # optionally adjust the jd for the switch from + # the Julian to Gregorian Calendar + # here assumed to have occurred the day after 1582 October 4 + if calendar in ['standard','gregorian']: + if jd >= 2299170.5: + # 1582 October 15 (Gregorian Calendar) + B = 2 - A + int(A/4) + elif jd < 2299160.5: + # 1582 October 5 (Julian Calendar) + B = 0 + else: + raise ValueError, 'impossible date (falls in gap between end of Julian calendar and beginning of Gregorian calendar' + elif calendar == 'proleptic_gregorian': + B = 2 - A + int(A/4) + elif calendar == 'julian': + B = 0 + else: + raise ValueError, 'unknown calendar, must be one of julian,standard,gregorian,proleptic_gregorian, got %s' % calendar + + # adjust for Julian calendar if necessary + jd = jd + B + + return jd + +def _NoLeapDayFromDate(date): + + """ + +creates a Julian Day for a calendar with no leap years from a datetime +instance. Returns the fractional Julian Day (resolution 1 second). + + """ + + year=date.year; month=date.month; day=date.day + hour=date.hour; minute=date.minute; second=date.second + # Convert time to fractions of a day + day = day + hour/24.0 + minute/1440.0 + second/86400.0 + + # Start Meeus algorithm (variables are in his notation) + if (month < 3): + month = month + 12 + year = year - 1 + + jd = int(365. * (year + 4716)) + int(30.6001 * (month + 1)) + \ + day - 1524.5 + + return jd + +def _AllLeapFromDate(date): + + """ + +creates a Julian Day for a calendar where all years have 366 days from +a 'datetime-like' object. 
+Returns the fractional Julian Day (resolution 1 second). + + """ + + year=date.year; month=date.month; day=date.day + hour=date.hour; minute=date.minute; second=date.second + # Convert time to fractions of a day + day = day + hour/24.0 + minute/1440.0 + second/86400.0 + + # Start Meeus algorithm (variables are in his notation) + if (month < 3): + month = month + 12 + year = year - 1 + + jd = int(366. * (year + 4716)) + int(30.6001 * (month + 1)) + \ + day - 1524.5 + + return jd + +def _360DayFromDate(date): + + """ + +creates a Julian Day for a calendar where all months have 30 daysfrom +a 'datetime-like' object. +Returns the fractional Julian Day (resolution 1 second). + + """ + + year=date.year; month=date.month; day=date.day + hour=date.hour; minute=date.minute; second=date.second + # Convert time to fractions of a day + day = day + hour/24.0 + minute/1440.0 + second/86400.0 + + jd = int(360. * (year + 4716)) + int(30. * (month - 1)) + day + + return jd + +def DateFromJulianDay(JD,calendar='standard'): + """ + +returns a 'datetime-like' object given Julian Day. Julian Day is a +fractional day with a resolution of 1 second. + +if calendar='standard' or 'gregorian' (default), Julian day follows Julian +Calendar on and before 1582-10-5, Gregorian calendar after 1582-10-15. + +if calendar='proleptic_gregorian', Julian Day follows gregorian calendar. + +if calendar='julian', Julian Day follows julian calendar. + +The datetime object is a 'real' datetime object if the date falls in +the Gregorian calendar (i.e. calendar='proleptic_gregorian', or +calendar = 'standard'/'gregorian' and the date is after 1582-10-15). +Otherwise, it's a 'phony' datetime object which is actually an instance +of netcdftime.datetime. + + +Algorithm: + +Meeus, Jean (1998) Astronomical Algorithms (2nd Edition). Willmann-Bell, +Virginia. p. 63 + + """ + + # based on redate.py by David Finlayson. 
+ + if JD < 0: + raise ValueError, 'Julian Day must be positive' + + dayofwk = int(math.fmod(int(JD + 1.5),7)) + (F, Z) = math.modf(JD + 0.5) + Z = int(Z) + if calendar in ['standard','gregorian']: + if JD < 2299160.5: + A = Z + else: + alpha = int((Z - 1867216.25)/36524.25) + A = Z + 1 + alpha - int(alpha/4) + + elif calendar == 'proleptic_gregorian': + alpha = int((Z - 1867216.25)/36524.25) + A = Z + 1 + alpha - int(alpha/4) + elif calendar == 'julian': + A = Z + else: + raise ValueError, 'unknown calendar, must be one of julian,standard,gregorian,proleptic_gregorian, got %s' % calendar + + B = A + 1524 + C = int((B - 122.1)/365.25) + D = int(365.25 * C) + E = int((B - D)/30.6001) + + # Convert to date + day = B - D - int(30.6001 * E) + F + nday = B-D-123 + if nday <= 305: + dayofyr = nday+60 + else: + dayofyr = nday-305 + if E < 14: + month = E - 1 + else: + month = E - 13 + + if month > 2: + year = C - 4716 + else: + year = C - 4715 + + # a leap year? + leap = 0 + if year % 4 == 0: + leap = 1 + if calendar == 'proleptic_gregorian' or \ + (calendar in ['standard','gregorian'] and JD >= 2299160.5): + if year % 100 == 0 and year % 400 != 0: + print year % 100, year % 400 + leap = 0 + if leap and month > 2: + dayofyr = dayofyr + leap + + # Convert fractions of a day to time + (dfrac, days) = math.modf(day/1.0) + (hfrac, hours) = math.modf(dfrac * 24.0) + (mfrac, minutes) = math.modf(hfrac * 60.0) + seconds = round(mfrac * 60.0) # seconds are rounded + + if seconds > 59: + seconds = 0 + minutes = minutes + 1 + if minutes > 59: + minutes = 0 + hours = hours + 1 + if hours > 23: + hours = 0 + days = days + 1 + + # return a 'real' datetime instance if calendar is gregorian. + if calendar == 'proleptic_gregorian' or \ + (calendar in ['standard','gregorian'] and JD >= 2299160.5): + return real_datetime(year,month,int(days),int(hours),int(minutes),int(seconds)) + else: + # or else, return a 'datetime-like' instance. 
+ return datetime(year,month,int(days),int(hours),int(minutes),int(seconds),dayofwk,dayofyr) + +def _DateFromNoLeapDay(JD): + """ + +returns a 'datetime-like' object given Julian Day for a calendar with no leap +days. Julian Day is a fractional day with a resolution of 1 second. + + """ + + # based on redate.py by David Finlayson. + + if JD < 0: + raise ValueError, 'Julian Day must be positive' + + dayofwk = int(math.fmod(int(JD + 1.5),7)) + (F, Z) = math.modf(JD + 0.5) + Z = int(Z) + A = Z + B = A + 1524 + C = int((B - 122.1)/365.) + D = int(365. * C) + E = int((B - D)/30.6001) + + # Convert to date + day = B - D - int(30.6001 * E) + F + nday = B-D-123 + if nday <= 305: + dayofyr = nday+60 + else: + dayofyr = nday-305 + if E < 14: + month = E - 1 + else: + month = E - 13 + + if month > 2: + year = C - 4716 + else: + year = C - 4715 + + # Convert fractions of a day to time + (dfrac, days) = math.modf(day/1.0) + (hfrac, hours) = math.modf(dfrac * 24.0) + (mfrac, minutes) = math.modf(hfrac * 60.0) + seconds = round(mfrac * 60.0) # seconds are rounded + + if seconds > 59: + seconds = 0 + minutes = minutes + 1 + if minutes > 59: + minutes = 0 + hours = hours + 1 + if hours > 23: + hours = 0 + days = days + 1 + + return datetime(year,month,int(days),int(hours),int(minutes),int(seconds), dayofwk, dayofyr) + +def _DateFromAllLeap(JD): + """ + +returns a 'datetime-like' object given Julian Day for a calendar where all +years have 366 days. +Julian Day is a fractional day with a resolution of 1 second. + + """ + + # based on redate.py by David Finlayson. + + if JD < 0: + raise ValueError, 'Julian Day must be positive' + + dayofwk = int(math.fmod(int(JD + 1.5),7)) + (F, Z) = math.modf(JD + 0.5) + Z = int(Z) + A = Z + B = A + 1524 + C = int((B - 122.1)/366.) + D = int(366. 
* C) + E = int((B - D)/30.6001) + + # Convert to date + day = B - D - int(30.6001 * E) + F + nday = B-D-123 + if nday <= 305: + dayofyr = nday+60 + else: + dayofyr = nday-305 + if E < 14: + month = E - 1 + else: + month = E - 13 + if month > 2: + dayofyr = dayofyr+1 + + if month > 2: + year = C - 4716 + else: + year = C - 4715 + + # Convert fractions of a day to time + (dfrac, days) = math.modf(day/1.0) + (hfrac, hours) = math.modf(dfrac * 24.0) + (mfrac, minutes) = math.modf(hfrac * 60.0) + seconds = round(mfrac * 60.0) # seconds are rounded + + if seconds > 59: + seconds = 0 + minutes = minutes + 1 + if minutes > 59: + minutes = 0 + hours = hours + 1 + if hours > 23: + hours = 0 + days = days + 1 + + return datetime(year,month,int(days),int(hours),int(minutes),int(seconds), dayofwk, dayofyr) + +def _DateFrom360Day(JD): + """ + +returns a 'datetime-like' object given Julian Day for a calendar where all +months have 30 days. +Julian Day is a fractional day with a resolution of 1 second. + + """ + + if JD < 0: + raise ValueError, 'Julian Day must be positive' + + #jd = int(360. * (year + 4716)) + int(30. * (month - 1)) + day + (F, Z) = math.modf(JD) + year = int((Z-0.5)/360.) 
- 4716 + dayofyr = JD - (year+4716)*360 + month = int((dayofyr-0.5)/30)+1 + day = dayofyr - (month-1)*30 + F + + # Convert fractions of a day to time + (dfrac, days) = math.modf(day/1.0) + (hfrac, hours) = math.modf(dfrac * 24.0) + (mfrac, minutes) = math.modf(hfrac * 60.0) + seconds = round(mfrac * 60.0) # seconds are rounded + + if seconds > 59: + seconds = 0 + minutes = minutes + 1 + if minutes > 59: + minutes = 0 + hours = hours + 1 + if hours > 23: + hours = 0 + days = days + 1 + + return datetime(year,month,int(days),int(hours),int(minutes),int(seconds),-1, int(dayofyr)) + +def _dateparse(timestr,format='%Y-%m-%d %H:%M:%S'): + """parse a string of the form time-units since yyyy-mm-dd hh:mm:ss + return a tuple (units, datetimeinstance)""" + timestr_split = timestr.split() + units = timestr_split[0].lower() + if units not in _units: + raise ValueError,"units must be one of 'seconds', 'minutes', 'hours' or 'days' (or singular version of these), got '%s'" % units + if timestr_split[1].lower() != 'since': + raise ValueError,"no 'since' in unit_string" + # use strptime to parse the date string. + n = timestr.find('since')+6 + #year,month,day,hour,minute,second,daywk,dayyr,tz = strptime(timestr[n:],format) + year,month,day,hour,minute,second = _parse_date(timestr[n:]) + #if dayyr == -1: dayyr=1 # must have valid day of year for strftime to work + #return units, datetime(year, month, day, hour, minute, second, daywk, dayyr) + return units, datetime(year, month, day, hour, minute, second) + +class utime: + """ +Performs conversions of netCDF time coordinate +data to/from datetime objects. + +To initialize: C{t = utime(unit_string,format='%Y-%m-%d %H:%M:%S',calendar='standard')} + +where + +B{C{unit_string}} is a string of the form +C{'time-units since <format>'} defining the time units. + +B{C{format}} is a string describing a reference time. This string is converted +to a year,month,day,hour,minute,second tuple by strptime. 
The default +format is C{'%Y-%m-%d %H:%M:%S'}. See the C{time.strptime} docstring for other +valid formats. + +Valid time-units are days, hours, minutes and seconds (the singular forms +are also accepted). An example unit_string would be C{'hours +since 0001-01-01 00:00:00'}. + +The B{C{calendar}} keyword describes the calendar used in the time calculations. +All the values currently defined in the U{CF metadata convention +<http://www.cgd.ucar.edu/cms/eaton/cf-metadata/CF-1.0.html#time>} are +accepted. The default is C{'standard'}, which corresponds to the mixed +Gregorian/Julian calendar used by the C{udunits library}. Valid calendars +are: + +C{'gregorian'} or C{'standard'} (default): + +Mixed Gregorian/Julian calendar as defined by udunits. + +C{'proleptic_gregorian'}: + +A Gregorian calendar extended to dates before 1582-10-15. That is, a year +is a leap year if either (i) it is divisible by 4 but not by 100 or (ii) +it is divisible by 400. + +C{'noleap'} or C{'365_day'}: + +Gregorian calendar without leap years, i.e., all years are 365 days long. +all_leap or 366_day Gregorian calendar with every year being a leap year, +i.e., all years are 366 days long. + +C{'360_day'}: + +All years are 360 days divided into 30 day months. + +C{'julian'}: + +Proleptic Julian calendar, extended to dates after 1582-10-5. A year is a +leap year if it is divisible by 4. + +The C{L{num2date}} and C{L{date2num}} class methods can used to convert datetime +instances to/from the specified time units using the specified calendar. + +The datetime instances returned by C{num2date} are 'real' python datetime +objects if the date falls in the Gregorian calendar (i.e. +C{calendar='proleptic_gregorian', 'standard'} or C{'gregorian'} and +the date is after 1582-10-15). Otherwise, they are 'phony' datetime +objects which are actually instances of C{L{netcdftime.datetime}}. 
This is +because the python datetime module cannot handle the weird dates in some +calendars (such as C{'360_day'} and C{'all_leap'}) which don't exist in any real +world calendar. + + +Example usage: + +>>> from netcdftime import utime +>>> from datetime import datetime +>>> cdftime = utime('hours since 0001-01-01 00:00:00') +>>> date = datetime.now() +>>> print date +2006-03-17 16:04:02.561678 +>>> +>>> t = cdftime.date2num(date) +>>> print t +17577328.0672 +>>> +>>> date = cdftime.num2date(t) +>>> print date +2006-03-17 16:04:02 +>>> + +The resolution of the transformation operation is 1 second. + +Warning: Dates between 1582-10-5 and 1582-10-15 do not exist in the +C{'standard'} or C{'gregorian'} calendars. An exception will be raised if you pass +a 'datetime-like' object in that range to the C{L{date2num}} class method. + +Words of Wisdom from the British MetOffice concerning reference dates +U{http://www.metoffice.com/research/hadleycentre/models/GDT/ch26.html}: + +"udunits implements the mixed Gregorian/Julian calendar system, as +followed in England, in which dates prior to 1582-10-15 are assumed to use +the Julian calendar. Other software cannot be relied upon to handle the +change of calendar in the same way, so for robustness it is recommended +that the reference date be later than 1582. If earlier dates must be used, +it should be noted that udunits treats 0 AD as identical to 1 AD." + +@ivar origin: datetime instance defining the origin of the netCDF time variable. +@ivar calendar: the calendar used (as specified by the C{calendar} keyword). +@ivar unit_string: a string defining the netCDF time variable. +@ivar units: the units part of C{unit_string} (i.e. 'days', 'hours', 'seconds'). + """ + def __init__(self,unit_string,format='%Y-%m-%d %H:%M:%S',calendar='standard'): + """ +@param unit_string: a string of the form +C{'time-units since <format>'} defining the time units. + +@keyword format: a string describing a reference time. 
This string is converted +to a year,month,day,hour,minute,second tuple by strptime. The default +format is C{'%Y-%m-%d %H:%M:%S'}. See the C{time.strptime} docstring for other +valid formats. +Valid time-units are days, hours, minutes and seconds (the singular forms +are also accepted). An example unit_string would be C{'hours +since 0001-01-01 00:00:00'}. + +@keyword calendar: describes the calendar used in the time calculations. +All the values currently defined in the U{CF metadata convention +<http://www.cgd.ucar.edu/cms/eaton/cf-metadata/CF-1.0.html#time>} are +accepted. The default is C{'standard'}, which corresponds to the mixed +Gregorian/Julian calendar used by the C{udunits library}. Valid calendars +are: + - C{'gregorian'} or C{'standard'} (default): + Mixed Gregorian/Julian calendar as defined by udunits. + - C{'proleptic_gregorian'}: + A Gregorian calendar extended to dates before 1582-10-15. That is, a year + is a leap year if either (i) it is divisible by 4 but not by 100 or (ii) + it is divisible by 400. + - C{'noleap'} or C{'365_day'}: + Gregorian calendar without leap years, i.e., all years are 365 days long. + all_leap or 366_day Gregorian calendar with every year being a leap year, + i.e., all years are 366 days long. + -C{'360_day'}: + All years are 360 days divided into 30 day months. + -C{'julian'}: + Proleptic Julian calendar, extended to dates after 1582-10-5. A year is a + leap year if it is divisible by 4. + +@returns: A class instance which may be used for converting times from netCDF +units to datetime objects. 
+ """ + if calendar in _calendars: + self.calendar = calendar + else: + raise ValueError, "calendar must be one of %s, got '%s'" % (str(_calendars),calendar) + units, self.origin = _dateparse(unit_string,format=format) + self.units = units + self.unit_string = unit_string + if self.calendar in ['noleap','365_day'] and self.origin.month == 2 and self.origin.day == 29: + raise ValueError, 'cannot specify a leap day as the reference time with the noleap calendar' + if self.calendar == '360_day' and self.origin.day > 30: + raise ValueError, 'there are only 30 days in every month with the 360_day calendar' + if self.calendar in ['noleap','365_day']: + self._jd0 = _NoLeapDayFromDate(self.origin) + elif self.calendar in ['all_leap','366_day']: + self._jd0 = _AllLeapFromDate(self.origin) + elif self.calendar == '360_day': + self._jd0 = _360DayFromDate(self.origin) + else: + self._jd0 = JulianDayFromDate(self.origin,calendar=self.calendar) + + def date2num(self,date): + """ +Returns C{time_value} in units described by L{unit_string}, using +the specified L{calendar}, given a 'datetime-like' object. + +Resolution is 1 second. + +If C{calendar = 'standard'} or C{'gregorian'} (indicating +that the mixed Julian/Gregorian calendar is to be used), an +exception will be raised if the 'datetime-like' object describes +a date between 1582-10-5 and 1582-10-15. + +Works for scalars, sequences and numpy arrays. +Returns a scalar if input is a scalar, else returns a numpy array. 
+ """ + isscalar = False + try: + date[0] + except: + isscalar = True + if not isscalar: + date = numpy.array(date) + shape = date.shape + if self.calendar in ['julian','standard','gregorian','proleptic_gregorian']: + if isscalar: + jdelta = JulianDayFromDate(date,self.calendar)-self._jd0 + else: + jdelta = [JulianDayFromDate(d,self.calendar)-self._jd0 for d in date.flat] + elif self.calendar in ['noleap','365_day']: + if date.month == 2 and date.day == 29: + raise ValueError, 'there is no leap day in the noleap calendar' + if isscalar: + jdelta = _NoLeapDayFromDate(date) - self._jd0 + else: + jdelta = [_NoLeapDayFromDate(d)-self._jd0 for d in date.flat] + elif self.calendar in ['all_leap','366_day']: + if isscalar: + jdelta = _AllLeapFromDate(date) - self._jd0 + else: + jdelta = [_AllLeapFromDate(d)-self._jd0 for d in date.flat] + elif self.calendar == '360_day': + if self.calendar == '360_day' and date.day > 30: + raise ValueError, 'there are only 30 days in every month with the 360_day calendar' + if isscalar: + jdelta = _360DayFromDate(date) - self._jd0 + else: + jdelta = [_360DayFromDate(d)-self._jd0 for d in date.flat] + if not isscalar: + jdelta = numpy.array(jdelta) + if self.units in ['second','seconds']: + jdelta = jdelta*86400. + elif self.units in ['minute','minutes']: + jdelta = jdelta*1440. + elif self.units in ['hours','hours']: + jdelta = jdelta*24. + if isscalar: + return jdelta + else: + return numpy.reshape(jdelta,shape) + + def num2date(self,time_value): + """ +Return a 'datetime-like' object given a C{time_value} in units +described by L{unit_string}, using L{calendar}. + +Resolution is 1 second. + +Works for scalars, sequences and numpy arrays. +Returns a scalar if input is a scalar, else returns a numpy array. + +The datetime instances returned by C{num2date} are 'real' python datetime +objects if the date falls in the Gregorian calendar (i.e. 
+C{calendar='proleptic_gregorian'}, or C{calendar = 'standard'/'gregorian'} and +the date is after 1582-10-15). Otherwise, they are 'phony' datetime +objects which are actually instances of netcdftime.datetime. This is +because the python datetime module cannot handle the weird dates in some +calendars (such as C{'360_day'} and C{'all_leap'}) which don't exist in any real +world calendar. + """ + isscalar = False + try: + time_value[0] + except: + isscalar = True + if not isscalar: + time_value = numpy.array(time_value) + shape = time_value.shape + if self.units in ['second','seconds']: + jdelta = time_value/86400. + elif self.units in ['minute','minutes']: + jdelta = time_value/1440. + elif self.units in ['hours','hours']: + jdelta = time_value/24. + elif self.units in ['day','days']: + jdelta = time_value + jd = self._jd0 + jdelta + if self.calendar in ['julian','standard','gregorian','proleptic_gregorian']: + if not isscalar: + date = [DateFromJulianDay(j,self.calendar) for j in jd.flat] + else: + date = DateFromJulianDay(jd,self.calendar) + elif self.calendar in ['noleap','365_day']: + if not isscalar: + date = [_DateFromNoLeapDay(j) for j in jd.flat] + else: + date = _DateFromNoLeapDay(jd) + elif self.calendar in ['all_leap','366_day']: + if not isscalar: + date = [_DateFromAllLeap(j) for j in jd.flat] + else: + date = _DateFromAllLeap(jd) + elif self.calendar == '360_day': + if not isscalar: + date = [_DateFrom360Day(j) for j in jd.flat] + else: + date = _DateFrom360Day(jd) + if isscalar: + return date + else: + return numpy.reshape(numpy.array(date),shape) + +def _parse_date(origin): + """Parses a date string and returns a datetime object. + + This function parses the 'origin' part of the time unit. It should be + something like:: + + 2004-11-03 14:42:27.0 +2:00 + + Lots of things are optional; just the date is mandatory. 
+ + by Roberto D'Almeida + + excerpted from coards.py - http://cheeseshop.python.org/pypi/coards/ + """ + # yyyy-mm-dd [hh:mm:ss[.s][ [+-]hh[:][mm]]] + p = re.compile( r'''(?P<year>\d{1,4}) # yyyy + - # + (?P<month>\d{1,2}) # mm or m + - # + (?P<day>\d{1,2}) # dd or d + # + (?: # [optional time and timezone] + \s # + (?P<hour>\d{1,2}) # hh or h + : # + (?P<min>\d{1,2}) # mm or m + : # + (?P<sec>\d{1,2}) # ss or s + # + (?: # [optional decisecond] + \. # . + (?P<dsec>\d) # s + )? # + (?: # [optional timezone] + \s # + (?P<ho>[+-]? # [+ or -] + \d{1,2}) # hh or h + :? # [:] + (?P<mo>\d{2})? # [mm] + )? # + )? # + $ # EOL + ''', re.VERBOSE) + + m = p.match(origin.strip()) + if m: + c = m.groupdict(0) + + # Instantiate timezone object. + #offset = int(c['ho'])*60 + int(c['mo']) + #tz = FixedOffset(offset, 'Unknown') + + #return datetime(int(c['year']), + # int(c['month']), + # int(c['day']), + # int(c['hour']), + # int(c['min']), + # int(c['sec']), + # int(c['dsec']) * 100000, + # tz) + return int(c['year']),int(c['month']),int(c['day']),int(c['hour']),int(c['min']),int(c['sec']) + + raise Exception('Invalid date origin: %s' % origin) + +# remove the unsupposed "%s" command. But don't +# do it if there's an even number of %s before the s +# because those are all escaped. Can't simply +# remove the s because the result of +# %sY +# should be %Y if %s isn't supported, not the +# 4 digit year. +_illegal_s = re.compile(r"((^|[^%])(%%)*%s)") + +def _findall(text, substr): + # Also finds overlaps + sites = [] + i = 0 + while 1: + j = text.find(substr, i) + if j == -1: + break + sites.append(j) + i=j+1 + return sites + +# Every 28 years the calendar repeats, except through century leap +# years where it's 6 years. But only if you're using the Gregorian +# calendar. ;) + +def _strftime(dt, fmt): + if _illegal_s.search(fmt): + raise TypeError("This strftime implementation does not handle %s") + # don't use strftime method at all. 
+ #if dt.year > 1900: + # return dt.strftime(fmt) + + year = dt.year + # For every non-leap year century, advance by + # 6 years to get into the 28-year repeat cycle + delta = 2000 - year + off = 6*(delta // 100 + delta // 400) + year = year + off + + # Move to around the year 2000 + year = year + ((2000 - year)//28)*28 + timetuple = dt.timetuple() + s1 = time.strftime(fmt, (year,) + timetuple[1:]) + sites1 = _findall(s1, str(year)) + + s2 = time.strftime(fmt, (year+28,) + timetuple[1:]) + sites2 = _findall(s2, str(year+28)) + + sites = [] + for site in sites1: + if site in sites2: + sites.append(site) + + s = s1 + syear = "%4d" % (dt.year,) + for site in sites: + s = s[:site] + syear + s[site+4:] + return s Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-12-04 19:16:57 UTC (rev 4583) +++ trunk/toolkits/basemap/setup.py 2007-12-04 19:18:03 UTC (rev 4584) @@ -114,10 +114,6 @@ packages = packages + ['httplib2'] package_dirs['httlib2'] = os.path.join('lib','httplib2') -# install netcdftime -packages = packages + ['netcdftime'] -package_dirs['httlib2'] = os.path.join('lib','netcdftime') - if 'setuptools' in sys.modules: # Are we running with setuptools? # if so, need to specify all the packages in heirarchy This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-04 20:56:30
|
Revision: 4601 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4601&view=rev Author: jswhit Date: 2007-12-04 12:56:05 -0800 (Tue, 04 Dec 2007) Log Message: ----------- bump version number Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2007-12-04 20:55:04 UTC (rev 4600) +++ trunk/toolkits/basemap/Changelog 2007-12-04 20:56:05 UTC (rev 4601) @@ -1,5 +1,6 @@ +version 0.9.9 (not yet released) * added num2date and date2num functions, which use - included netcdftime module. + included netcdftime module. version 0.9.8 (svn revision 4526) * fixes for filling continents in orthographic projection. * added 'maskandscale' kwarg to NetCDFFile to Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 20:55:04 UTC (rev 4600) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-04 20:56:05 UTC (rev 4601) @@ -37,7 +37,7 @@ # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) -__version__ = '0.9.8' +__version__ = '0.9.9' # supported map projections. 
_projnames = {'cyl' : 'Cylindrical Equidistant', Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-12-04 20:55:04 UTC (rev 4600) +++ trunk/toolkits/basemap/setup.py 2007-12-04 20:56:05 UTC (rev 4601) @@ -134,7 +134,7 @@ package_data = {'matplotlib.toolkits.basemap':pyproj_datafiles+basemap_datafiles} setup( name = "basemap", - version = "0.9.8", + version = "0.9.9", description = "Plot data on map projections with matplotlib", long_description = """ An add-on toolkit for matplotlib that lets you plot data This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-06 23:30:32
|
Revision: 4659 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4659&view=rev Author: jswhit Date: 2007-12-06 15:30:17 -0800 (Thu, 06 Dec 2007) Log Message: ----------- add initial support for reading shapefiles with Point shapes. (from patch by Erik Andersen on the patch tracker) Modified Paths: -------------- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py Added Paths: ----------- trunk/toolkits/basemap/examples/cities.dbf trunk/toolkits/basemap/examples/cities.shp trunk/toolkits/basemap/examples/cities.shx trunk/toolkits/basemap/examples/plotcities.py Added: trunk/toolkits/basemap/examples/cities.dbf =================================================================== (Binary files differ) Property changes on: trunk/toolkits/basemap/examples/cities.dbf ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/toolkits/basemap/examples/cities.shp =================================================================== (Binary files differ) Property changes on: trunk/toolkits/basemap/examples/cities.shp ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/toolkits/basemap/examples/cities.shx =================================================================== (Binary files differ) Property changes on: trunk/toolkits/basemap/examples/cities.shx ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/toolkits/basemap/examples/plotcities.py =================================================================== --- trunk/toolkits/basemap/examples/plotcities.py (rev 0) +++ trunk/toolkits/basemap/examples/plotcities.py 2007-12-06 23:30:17 UTC (rev 4659) @@ -0,0 +1,18 @@ +import pylab as p +import numpy +from matplotlib.toolkits.basemap import Basemap as Basemap + +# cities colored by population rank. 
+m = Basemap() +shp_info = m.readshapefile('cities','cities') +x, y = zip(*m.cities) +pop = [] +for item in m.cities_info: + pop.append(int(item['POPULATION'])) +pop = numpy.array(pop) +poprank = numpy.argsort(pop) +m.drawcoastlines() +m.fillcontinents() +m.scatter(x,y,25,poprank,cmap=p.cm.jet_r,marker='o',faceted=False,zorder=10) +p.title('City Locations colored by Population Rank') +p.show() Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-06 22:28:27 UTC (rev 4658) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2007-12-06 23:30:17 UTC (rev 4659) @@ -1317,29 +1317,39 @@ def readshapefile(self,shapefile,name,drawbounds=True,zorder=None, linewidth=0.5,color='k',antialiased=1,ax=None): """ - read in shape file, draw boundaries on map. + read in shape file, optionally draw boundaries on map. Restrictions: - Assumes shapes are 2D - - vertices must be in geographic (lat/lon) coordinates. + - works for Point, MultiPoint, Polyline and Polygon shapes. + - vertices/points must be in geographic (lat/lon) coordinates. + Mandatory Arguments: + shapefile - path to shapefile components. Example: shapefile='/home/jeff/esri/world_borders' assumes that world_borders.shp, world_borders.shx and world_borders.dbf live in /home/jeff/esri. name - name for Basemap attribute to hold the shapefile - vertices in native map projection coordinates. + vertices or points in native map projection coordinates. Class attribute name+'_info' is a list of dictionaries, one for each shape, containing attributes of each shape from dbf file. 
- For example, if name='counties', self.counties - will be a list of vertices for each shape in map projection + For example, if name='counties', self.counties will be + a list of x,y vertices for each shape in map projection coordinates and self.counties_info will be a list of dictionaries - with shape attributes. Rings in individual shapes are split out - into separate polygons. Additional keys + with shape attributes. Rings in individual Polygon shapes are split + out into separate polygons, and additional keys 'RINGNUM' and 'SHAPENUM' are added to shape attribute dictionary. - drawbounds - draw boundaries of shapes (default True) - zorder = shape boundary zorder (if not specified, default for LineCollection - is used). + + Optional Keyword Arguments (only relevant for Polyline + and Polygon shape types, for Point and MultiPoint shapes they + are ignored): + + drawbounds - draw boundaries of shapes (default True). Only + relevant for Polyline and Polygon shape types, for Point + and MultiPoint types no drawing is done. + zorder = shape boundary zorder (if not specified, + default for LineCollection is used). linewidth - shape boundary line width (default 0.5) color - shape boundary line color (default black) antialiased - antialiasing switch for shape boundaries (default True). @@ -1365,52 +1375,64 @@ info = shp.info() if info[1] not in [1,3,5,8]: raise ValueError, 'readshapefile can only handle 2D shape types' - shpsegs = [] - shpinfo = [] - for npoly in range(shp.info()[0]): - shp_object = shp.read_object(npoly) - verts = shp_object.vertices() - rings = len(verts) - for ring in range(rings): - lons, lats = zip(*verts[ring]) - if max(lons) > 721. or min(lons) < -721. or max(lats) > 91. or min(lats) < -91: - msg=dedent(""" - shapefile must have lat/lon vertices - it looks like this one has vertices - in map projection coordinates. 
You can convert the shapefile to geographic - coordinates using the shpproj utility from the shapelib tools - (http://shapelib.maptools.org/shapelib-tools.html)""") - raise ValueError,msg - x, y = self(lons, lats) - shpsegs.append(zip(x,y)) - if ring == 0: - shapedict = dbf.read_record(npoly) - # add information about ring number to dictionary. - shapedict['RINGNUM'] = ring+1 - shapedict['SHAPENUM'] = npoly+1 - shpinfo.append(shapedict) - # draw shape boundaries using LineCollection. - if drawbounds: - # get current axes instance (if none specified). - if ax is None and self.ax is None: - try: - ax = pylab.gca() - except: - import pylab - ax = pylab.gca() - elif ax is None and self.ax is not None: - ax = self.ax - # make LineCollections for each polygon. - lines = LineCollection(shpsegs,antialiaseds=(1,)) - lines.set_color(color) - lines.set_linewidth(linewidth) - if zorder is not None: - lines.set_zorder(zorder) - ax.add_collection(lines) - # set axes limits to fit map region. - self.set_axes_limits(ax=ax) - # save segments/polygons and shape attribute dicts as class attributes. - self.__dict__[name]=shpsegs - self.__dict__[name+'_info']=shpinfo + msg=dedent(""" + shapefile must have lat/lon vertices - it looks like this one has vertices + in map projection coordinates. You can convert the shapefile to geographic + coordinates using the shpproj utility from the shapelib tools + (http://shapelib.maptools.org/shapelib-tools.html)""") + if info[1] in [1,8]: # a Point or Multi-Point file. + coords = [shp.read_object(i).vertices()[0] + for i in range(shp.info()[0])] + attributes = [dbf.read_record(i) + for i in range(shp.info()[0])] + lons, lats = zip(*coords) + if max(lons) > 721. or min(lons) < -721. or max(lats) > 91. or min(lats) < -91: + raise ValueError,msg + x,y = self(lons, lats) + self.__dict__[name]=zip(x,y) + self.__dict__[name+'_info']=attributes + else: # a Polyline or Polygon file. 
+ shpsegs = [] + shpinfo = [] + for npoly in range(shp.info()[0]): + shp_object = shp.read_object(npoly) + verts = shp_object.vertices() + rings = len(verts) + for ring in range(rings): + lons, lats = zip(*verts[ring]) + if max(lons) > 721. or min(lons) < -721. or max(lats) > 91. or min(lats) < -91: + raise ValueError,msg + x, y = self(lons, lats) + shpsegs.append(zip(x,y)) + if ring == 0: + shapedict = dbf.read_record(npoly) + # add information about ring number to dictionary. + shapedict['RINGNUM'] = ring+1 + shapedict['SHAPENUM'] = npoly+1 + shpinfo.append(shapedict) + # draw shape boundaries using LineCollection. + if drawbounds: + # get current axes instance (if none specified). + if ax is None and self.ax is None: + try: + ax = pylab.gca() + except: + import pylab + ax = pylab.gca() + elif ax is None and self.ax is not None: + ax = self.ax + # make LineCollections for each polygon. + lines = LineCollection(shpsegs,antialiaseds=(1,)) + lines.set_color(color) + lines.set_linewidth(linewidth) + if zorder is not None: + lines.set_zorder(zorder) + ax.add_collection(lines) + # set axes limits to fit map region. + self.set_axes_limits(ax=ax) + # save segments/polygons and shape attribute dicts as class attributes. + self.__dict__[name]=shpsegs + self.__dict__[name+'_info']=shpinfo shp.close() dbf.close() return info This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-12 16:57:32
|
Revision: 4711 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4711&view=rev Author: jswhit Date: 2007-12-12 08:57:09 -0800 (Wed, 12 Dec 2007) Log Message: ----------- add plotsst.py example. Modified Paths: -------------- trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/examples/README trunk/toolkits/basemap/examples/run_all.py Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2007-12-12 16:43:29 UTC (rev 4710) +++ trunk/toolkits/basemap/MANIFEST.in 2007-12-12 16:57:09 UTC (rev 4711) @@ -43,6 +43,7 @@ include examples/geos_demo_2.py include examples/200706041200-msg-ch01-SAfrica.jpg include examples/fcstmaps.py +include examples/plotsst.py include examples/wiki_example.py include examples/fillstates.py include examples/run_all.py Modified: trunk/toolkits/basemap/examples/README =================================================================== --- trunk/toolkits/basemap/examples/README 2007-12-12 16:43:29 UTC (rev 4710) +++ trunk/toolkits/basemap/examples/README 2007-12-12 16:57:09 UTC (rev 4711) @@ -13,6 +13,9 @@ customticks.py shows how to create custom tick labels for a cylindrical projection. +plotcities.py shows how to read and plot data from a shapefile containing +Point data. + plotmap.py is the example on the matplotlib 'screenshots' page (included in test.py) which shows the ETOPO topography as an image on a Lambert Conformal projection (using imshow). @@ -52,6 +55,9 @@ fcstmaps.py is a sample multi-panel plot that accesses data over http using the dap module. An internet connection is required. +plotsst.py also uses dap client to access the data, and also illustrates +how the NetCDFFile function deals with missing values. + wiki_example.py is the example from the MatplotlibCookbook scipy wiki page (http://www.scipy.org/wikis/topical_software/MatplotlibCookbook/wikipage_view). 
Modified: trunk/toolkits/basemap/examples/run_all.py =================================================================== --- trunk/toolkits/basemap/examples/run_all.py 2007-12-12 16:43:29 UTC (rev 4710) +++ trunk/toolkits/basemap/examples/run_all.py 2007-12-12 16:57:09 UTC (rev 4711) @@ -2,6 +2,7 @@ test_files = glob.glob('*.py') test_files.remove('run_all.py') test_files.remove('fcstmaps.py') +test_files.remove('plotsst.py') test_files.remove('testgdal.py') test_files.remove('pnganim.py') test_files.remove('warpimage.py') This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2007-12-15 02:33:30
|
Revision: 4738 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4738&view=rev Author: jswhit Date: 2007-12-14 18:33:26 -0800 (Fri, 14 Dec 2007) Log Message: ----------- remove setuptools requirement. Modified Paths: -------------- trunk/toolkits/basemap/examples/plotsst.py trunk/toolkits/basemap/setup-data.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/examples/plotsst.py =================================================================== --- trunk/toolkits/basemap/examples/plotsst.py 2007-12-15 00:12:13 UTC (rev 4737) +++ trunk/toolkits/basemap/examples/plotsst.py 2007-12-15 02:33:26 UTC (rev 4738) @@ -3,7 +3,7 @@ # read in sea-surface temperature and ice data # can be a local file, a URL for a remote opendap dataset, # or (if PyNIO is installed) a GRIB or HDF file. -ncfile = NetCDFFile('http://nomads.ncdc.noaa.gov:8085/thredds/dodsC/oisst/2007/AVHRR/sst4-navy-eot.20071201.nc') +ncfile = NetCDFFile('http://nomads.ncdc.noaa.gov:8085/thredds/dodsC/oisst/2007/AVHRR/sst4-navy-eot.20071213.nc') # read sst. Will automatically create a masked array using # missing_value variable attribute. sst = ncfile.variables['sst'][:] @@ -33,7 +33,7 @@ # missing values over land will show up this color. m.drawmapboundary(fill_color='0.3') # plot ice, then with pcolor -im1 = m.pcolor(x,y,sst,shading='flat',cmap=pylab.cm.gist_ncar) +im1 = m.pcolor(x,y,sst,shading='flat',cmap=pylab.cm.jet) im2 = m.pcolor(x,y,ice,shading='flat',cmap=pylab.cm.gist_gray) # draw parallels and meridians, but don't bother labelling them. m.drawparallels(numpy.arange(-90.,120.,30.)) Modified: trunk/toolkits/basemap/setup-data.py =================================================================== --- trunk/toolkits/basemap/setup-data.py 2007-12-15 00:12:13 UTC (rev 4737) +++ trunk/toolkits/basemap/setup-data.py 2007-12-15 02:33:26 UTC (rev 4738) @@ -1,13 +1,4 @@ import sys, glob, os -if 'setuptools' in sys.modules: -# Are we running with setuptools? 
-# if so, need to specify all the packages in heirarchy - additional_params = {'namespace_packages' : ['matplotlib.toolkits']} - packages.extend(['matplotlib', 'matplotlib.toolkits']) - setup = setuptools.setup -else: - additional_params = {} - from distutils.core import setup packages = ['matplotlib.toolkits.basemap.data'] package_dirs = {'':'lib'} boundaryfiles = glob.glob("lib/matplotlib/toolkits/basemap/data/*_f.dat") Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2007-12-15 00:12:13 UTC (rev 4737) +++ trunk/toolkits/basemap/setup.py 2007-12-15 02:33:26 UTC (rev 4738) @@ -1,16 +1,5 @@ import sys, glob, os from distutils.core import setup -major, minor1, minor2, s, tmp = sys.version_info -if major==2 and minor1<=3: - # setuptools monkeypatches distutils.core.Distribution to support - # package_data - try: import setuptools - except ImportError: - raise SystemExit(""" -matplotlib requires setuptools for installation. Please download -http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if -you are doing a system wide install) to install the proper version of -setuptools for your system""") from distutils.core import Extension from distutils.util import convert_path import numpy This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2008-01-08 13:32:04
|
Revision: 4806 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4806&view=rev Author: jswhit Date: 2008-01-08 05:31:49 -0800 (Tue, 08 Jan 2008) Log Message: ----------- make python 2.4 a requirement, bump version to 0.9.9.1 Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/MANIFEST.in trunk/toolkits/basemap/README trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py trunk/toolkits/basemap/setup.py Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2008-01-08 03:11:38 UTC (rev 4805) +++ trunk/toolkits/basemap/Changelog 2008-01-08 13:31:49 UTC (rev 4806) @@ -1,3 +1,7 @@ +version 0.9.9.1 (svn revision 4805) + * require python 2.4 (really only needed for building). + Once namespace packages are re-enabled in matplotlib, + python 2.3 should work again. version 0.9.9 (svn revision 4799) * updated proj4 sources to version 4.6.0. * removed hidden dependency on setuptools (in dap module). 
Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2008-01-08 03:11:38 UTC (rev 4805) +++ trunk/toolkits/basemap/MANIFEST.in 2008-01-08 13:31:49 UTC (rev 4806) @@ -8,7 +8,6 @@ include KNOWN_BUGS include Changelog include setup.py -include setupegg.py include src/* include examples/simpletest.py include examples/hires.py Modified: trunk/toolkits/basemap/README =================================================================== --- trunk/toolkits/basemap/README 2008-01-08 03:11:38 UTC (rev 4805) +++ trunk/toolkits/basemap/README 2008-01-08 13:31:49 UTC (rev 4806) @@ -5,7 +5,7 @@ **Requirements** -python 2.3 (or higher) +python 2.4 (or higher) matplotlib 0.90 (or higher) Modified: trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2008-01-08 03:11:38 UTC (rev 4805) +++ trunk/toolkits/basemap/lib/matplotlib/toolkits/basemap/basemap.py 2008-01-08 13:31:49 UTC (rev 4806) @@ -40,7 +40,7 @@ # basemap data files now installed in lib/matplotlib/toolkits/basemap/data basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) -__version__ = '0.9.9' +__version__ = '0.9.9.1' # supported map projections. 
_projnames = {'cyl' : 'Cylindrical Equidistant', Modified: trunk/toolkits/basemap/setup.py =================================================================== --- trunk/toolkits/basemap/setup.py 2008-01-08 03:11:38 UTC (rev 4805) +++ trunk/toolkits/basemap/setup.py 2008-01-08 13:31:49 UTC (rev 4806) @@ -1,19 +1,19 @@ -import sys, glob, os -from distutils.core import setup +import sys, glob, os, numpy major, minor1, minor2, s, tmp = sys.version_info if major==2 and minor1<=3: # setuptools monkeypatches distutils.core.Distribution to support # package_data - try: import setuptools - except ImportError: - raise SystemExit(""" -matplotlib requires setuptools for installation. Please download -http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if -you are doing a system wide install) to install the proper version of -setuptools for your system""") + #try: import setuptools + #except ImportError: + # raise SystemExit(""" +#matplotlib requires setuptools for installation. Please download +#http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if +#you are doing a system wide install) to install the proper version of +#setuptools for your system""") + raise SystemExit("""The basemap toolkit requires python 2.4.""") +from distutils.core import setup from distutils.core import Extension from distutils.util import convert_path -import numpy def dbf_macros(): """Return the macros to define when compiling the dbflib wrapper. @@ -147,7 +147,7 @@ package_data = {'matplotlib.toolkits.basemap':pyproj_datafiles+basemap_datafiles} setup( name = "basemap", - version = "0.9.9", + version = "0.9.9.1", description = "Plot data on map projections with matplotlib", long_description = """ An add-on toolkit for matplotlib that lets you plot data This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2008-01-10 13:59:50
|
Revision: 4847 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4847&view=rev Author: jswhit Date: 2008-01-10 05:59:29 -0800 (Thu, 10 Jan 2008) Log Message: ----------- move matplotlit/toolkits to mpl_toolkits Modified Paths: -------------- trunk/toolkits/basemap/API_CHANGES trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/setup.py trunk/toolkits/basemap/setupegg.py Added Paths: ----------- trunk/toolkits/basemap/lib/mpl_toolkits/ trunk/toolkits/basemap/lib/mpl_toolkits/__init__.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/ trunk/toolkits/basemap/lib/mpl_toolkits/basemap/__init__.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_geod.so trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_proj.so trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/cm.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/ trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/5minmask.bin trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/GL27 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/README trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/__init__.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countries_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countries_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countries_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countries_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countries_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countriesmeta_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countriesmeta_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countriesmeta_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countriesmeta_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/countriesmeta_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/epsg trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/esri 
trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/esri.extra trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhs_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhs_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhs_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhs_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhs_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhsmeta_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhsmeta_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhsmeta_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhsmeta_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/gshhsmeta_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/nad.lst trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/nad27 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/nad83 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/ntv2_out.dist trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/other.extra trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/pj_out27.dist trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/pj_out83.dist trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/proj_def.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/rivers_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/rivers_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/rivers_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/rivers_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/rivers_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/riversmeta_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/riversmeta_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/riversmeta_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/riversmeta_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/riversmeta_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/states_c.dat 
trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/states_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/states_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/states_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/states_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/statesmeta_c.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/statesmeta_f.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/statesmeta_h.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/statesmeta_i.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/statesmeta_l.dat trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/td_out.dist trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/test27 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/test83 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/testntv2 trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/testvarious trunk/toolkits/basemap/lib/mpl_toolkits/basemap/data/world Removed Paths: ------------- trunk/toolkits/basemap/lib/matplotlib/ Modified: trunk/toolkits/basemap/API_CHANGES =================================================================== --- trunk/toolkits/basemap/API_CHANGES 2008-01-10 13:38:37 UTC (rev 4846) +++ trunk/toolkits/basemap/API_CHANGES 2008-01-10 13:59:29 UTC (rev 4847) @@ -1,3 +1,5 @@ +version 0.99: now must be imported as mpl_toolkits.basemap instead + of matplotlib.toolkits.basemap. version 0.9.8: remove linestyle kwarg from drawparallels and drawmeridians. add fill_color kwarg to drawmapboundary. version 0.9.7: added lake_color kwarg to fillcontinents. Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2008-01-10 13:38:37 UTC (rev 4846) +++ trunk/toolkits/basemap/Changelog 2008-01-10 13:59:29 UTC (rev 4847) @@ -1,3 +1,10 @@ +version 0.9.9.2 + * Now lives in mpl_toolkits.basemap. 
Instead + of 'from matplotlib.toolkits.basemap import Basemap', + use 'from mpl_toolkits.basemap import Basemap'. + All examples changed. Uses matplotlib mpl_toolkits + namespace package, so basemap can now be installed + if matplotlib is installed as an egg. version 0.9.9.1 (svn revision 4808) * require python 2.4 (really only needed for building). Once namespace packages are re-enabled in matplotlib, Added: trunk/toolkits/basemap/lib/mpl_toolkits/__init__.py =================================================================== --- trunk/toolkits/basemap/lib/mpl_toolkits/__init__.py (rev 0) +++ trunk/toolkits/basemap/lib/mpl_toolkits/__init__.py 2008-01-10 13:59:29 UTC (rev 4847) @@ -0,0 +1,4 @@ +try: + __import__('pkg_resources').declare_namespace(__name__) +except ImportError: + pass # must not have setuptools Added: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/__init__.py =================================================================== --- trunk/toolkits/basemap/lib/mpl_toolkits/basemap/__init__.py (rev 0) +++ trunk/toolkits/basemap/lib/mpl_toolkits/basemap/__init__.py 2008-01-10 13:59:29 UTC (rev 4847) @@ -0,0 +1,2 @@ +from basemap import __doc__, __version__ +from basemap import * Added: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_geod.so =================================================================== (Binary files differ) Property changes on: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_geod.so ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_proj.so =================================================================== (Binary files differ) Property changes on: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/_proj.so ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py 
=================================================================== --- trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py (rev 0) +++ trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py 2008-01-10 13:59:29 UTC (rev 4847) @@ -0,0 +1,2981 @@ +""" +Module for plotting data on maps with matplotlib. + +Contains the Basemap class (which does most of the +heavy lifting), and the following functions: + +NetCDFFile: Read local and remote NetCDF datasets. + +interp: bilinear interpolation between rectilinear grids. + +shiftgrid: shifts global lat/lon grids east or west. + +addcyclic: Add cyclic (wraparound) point in longitude. + +num2date: convert from a numeric time value to a datetime object. + +date2num: convert from a datetime object to a numeric time value. +""" +from matplotlib import __version__ as _matplotlib_version +from matplotlib.cbook import is_scalar, dedent +# check to make sure matplotlib is not too old. +_mpl_required_version = '0.98' +if _matplotlib_version < _mpl_required_version: + msg = dedent(""" + your matplotlib is too old - basemap requires version %s or + higher, you have version %s""" % + (_mpl_required_version,_matplotlib_version)) + raise ImportError(msg) +from matplotlib import rcParams, is_interactive, _pylab_helpers +from matplotlib.collections import LineCollection +from matplotlib.patches import Ellipse, Circle, Polygon +from matplotlib.lines import Line2D +import pyproj, sys, os, math, dbflib +from proj import Proj +import numpy as npy +from numpy import linspace, squeeze, ma +from shapelib import ShapeFile +import _geos, pupynere, netcdftime + +# basemap data files now installed in lib/matplotlib/toolkits/basemap/data +basemap_datadir = os.sep.join([os.path.dirname(__file__), 'data']) + +__version__ = '0.99' + +# supported map projections. 
+_projnames = {'cyl' : 'Cylindrical Equidistant', + 'merc' : 'Mercator', + 'tmerc' : 'Transverse Mercator', + 'omerc' : 'Oblique Mercator', + 'mill' : 'Miller Cylindrical', + 'lcc' : 'Lambert Conformal', + 'laea' : 'Lambert Azimuthal Equal Area', + 'nplaea' : 'North-Polar Lambert Azimuthal', + 'splaea' : 'South-Polar Lambert Azimuthal', + 'eqdc' : 'Equidistant Conic', + 'aeqd' : 'Azimuthal Equidistant', + 'npaeqd' : 'North-Polar Azimuthal Equidistant', + 'spaeqd' : 'South-Polar Azimuthal Equidistant', + 'aea' : 'Albers Equal Area', + 'stere' : 'Stereographic', + 'npstere' : 'North-Polar Stereographic', + 'spstere' : 'South-Polar Stereographic', + 'cass' : 'Cassini-Soldner', + 'poly' : 'Polyconic', + 'ortho' : 'Orthographic', + 'geos' : 'Geostationary', + 'sinu' : 'Sinusoidal', + 'moll' : 'Mollweide', + 'robin' : 'Robinson', + 'gnom' : 'Gnomonic', + } +supported_projections = [] +for _items in _projnames.iteritems(): + supported_projections.append("'%s' = %s\n" % (_items)) +supported_projections = ''.join(supported_projections) + +# projection specific parameters. 
+projection_params = {'cyl' : 'corners only (no width/height)', + 'merc' : 'corners plus lat_ts (no width/height)', + 'tmerc' : 'lon_0,lat_0', + 'omerc' : 'lon_0,lat_0,lat_1,lat_2,lon_1,lon_2,no width/height', + 'mill' : 'corners only (no width/height)', + 'lcc' : 'lon_0,lat_0,lat_1,lat_2', + 'laea' : 'lon_0,lat_0', + 'nplaea' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'splaea' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'eqdc' : 'lon_0,lat_0,lat_1,lat_2', + 'aeqd' : 'lon_0,lat_0', + 'npaeqd' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'spaeqd' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'aea' : 'lon_0,lat_0,lat_1', + 'stere' : 'lon_0,lat_0,lat_ts', + 'npstere' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'spstere' : 'bounding_lat,lon_0,lat_0,no corners or width/height', + 'cass' : 'lon_0,lat_0', + 'poly' : 'lon_0,lat_0', + 'ortho' : 'lon_0,lat_0', + 'geos' : 'lon_0,lat_0,satellite_height', + 'sinu' : 'lon_0,lat_0,no corners or width/height', + 'moll' : 'lon_0,lat_0,no corners or width/height', + 'robin' : 'lon_0,lat_0,no corners or width/height', + 'gnom' : 'lon_0,lat_0', + } + +# The __init__ docstring is pulled out here because it is so long; +# Having it in the usual place makes it hard to get from the +# __init__ argument list to the code that uses the arguments. +_Basemap_init_doc = """ + create a Basemap instance. + + This sets up a basemap with specified map projection. + and creates the coastline data structures in native map projection + coordinates. + + arguments: + + projection - map projection. Supported projections are: +%(supported_projections)s + Default is 'cyl'. + + For most map projections, the map projection region can either be + specified by setting these keywords: + + llcrnrlon - longitude of lower left hand corner of the desired map domain (degrees). + llcrnrlat - latitude of lower left hand corner of the desired map domain (degrees). 
+ urcrnrlon - longitude of upper right hand corner of the desired map domain (degrees). + urcrnrlat - latitude of upper right hand corner of the desired map domain (degrees). + + or these keywords: + + width - width of desired map domain in projection coordinates (meters). + height - height of desired map domain in projection coordinates (meters). + lon_0 - center of desired map domain (in degrees). + lat_0 - center of desired map domain (in degrees). + + For 'sinu', 'moll', 'npstere', 'spstere', 'nplaea', 'splaea', 'nplaea', + 'splaea', 'npaeqd', 'spaeqd' or 'robin', the values of + llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat,width and height are ignored + (because either they are computed internally, or entire globe is + always plotted). For the cylindrical projections + ('cyl','merc' and 'mill'), the default is to use + llcrnrlon=-180,llcrnrlat=-90, urcrnrlon=180 and urcrnrlat=90). For all other + projections except 'ortho' and 'geos', either the lat/lon values of the + corners or width and height must be specified by the user. + For 'ortho' and 'geos', the lat/lon values of the corners may be specified, + but if they are not, the entire globe is plotted. + + resolution - resolution of boundary database to use. Can be 'c' (crude), + 'l' (low), 'i' (intermediate), 'h' (high), 'f' (full) or None. + If None, no boundary data will be read in (and class methods + such as drawcoastlines will raise an exception if invoked). + Resolution drops off by roughly 80%% + between datasets. Higher res datasets are much slower to draw. + Default 'c'. Coastline data is from the GSHHS + (http://www.soest.hawaii.edu/wessel/gshhs/gshhs.html). + State, country and river datasets from the Generic Mapping + Tools (http://gmt.soest.hawaii.edu). + + area_thresh - coastline or lake with an area smaller than area_thresh + in km^2 will not be plotted. Default 10000,1000,100,10,1 for resolution + 'c','l','i','h','f'. 
+ + rsphere - radius of the sphere used to define map projection (default + 6370997 meters, close to the arithmetic mean radius of the earth). If + given as a sequence, the first two elements are interpreted as + the the radii of the major and minor axes of an ellipsoid. Note: sometimes + an ellipsoid is specified by the major axis and an 'inverse flattening + parameter' (if). The minor axis (b) can be computed from the major axis (a) + and the inverse flattening parameter using the formula if = a/(a-b). + + suppress_ticks - suppress automatic drawing of axis ticks and labels + in map projection coordinates. Default False, so parallels and meridians + can be labelled instead. If parallel or meridian labelling is requested + (using drawparallels and drawmeridians methods), automatic tick labelling + will be supressed even is suppress_ticks=False. suppress_ticks=False + is useful if you want to use your own custom tick formatter, or + if you want to let matplotlib label the axes in meters + using native map projection coordinates + + anchor - determines how map is placed in axes rectangle (passed to + axes.set_aspect). Default is 'C', which means map is centered. + Allowed values are ['C', 'SW', 'S', 'SE', 'E', 'NE', 'N', 'NW', 'W']. + + ax - set default axes instance (default None - pylab.gca() may be used + to get the current axes instance). If you don't want pylab to be imported, + you can either set this to a pre-defined axes instance, or use the 'ax' + keyword in each Basemap method call that does drawing. In the first case, + all Basemap method calls will draw to the same axes instance. In the + second case, you can draw to different axes with the same Basemap instance. + You can also use the 'ax' keyword in individual method calls to + selectively override the default axes instance. + + The following parameters are map projection parameters which all default to + None. Not all parameters are used by all projections, some are ignored. 
+ The module variable 'projection_params' is a dictionary which + lists which parameters apply to which projections. + + lat_ts - latitude of true scale for mercator projection, optional + for stereographic projection. + lat_1 - first standard parallel for lambert conformal, albers + equal area projection and equidistant conic projections. Latitude of one + of the two points on the projection centerline for oblique mercator. + If lat_1 is not given, but lat_0 is, lat_1 is set to lat_0 for + lambert conformal, albers equal area and equidistant conic. + lat_2 - second standard parallel for lambert conformal, albers + equal area projection and equidistant conic projections. Latitude of one + of the two points on the projection centerline for oblique mercator. + If lat_2 is not given, it is set to lat_1 for + lambert conformal, albers equal area and equidistant conic. + lon_1 - longitude of one of the two points on the projection centerline + for oblique mercator. + lon_2 - longitude of one of the two points on the projection centerline + for oblique mercator. + lat_0 - central latitude (y-axis origin) - used by all projections, + Must be equator for mercator projection. + lon_0 - central meridian (x-axis origin) - used by all projections, + boundinglat - bounding latitude for pole-centered projections (npstere,spstere, + nplaea,splaea,npaeqd,spaeqd). These projections are square regions centered + on the north or south pole. The longitude lon_0 is at 6-o'clock, and the + latitude circle boundinglat is tangent to the edge of the map at lon_0. + satellite_height - height of satellite (in m) above equator - + only relevant for geostationary projections ('geos'). Default 35,786 km. + + Here are the most commonly used class methods (see the docstring + for each for more details): + + To draw a graticule grid (labelled latitude and longitude lines) + use the drawparallels and drawmeridians methods. 
+ + To draw coastline, rivers and political boundaries, use the + the drawcontinents, drawrivers, drawcountries and drawstates methods. + + To fill the continents and inland lakes, use the fillcontinents method. + + To draw the boundary of the map projection region, and fill the + interior a certain color, use the drawmapboundary method. + + The contour, contourf, pcolor, pcolormesh, plot, scatter + quiver and imshow methods use the corresponding matplotlib axes + methods to draw on the map. + + The transform_scalar method can be used to interpolate regular + lat/lon grids of scalar data to native map projection grids. + + The transform_vector method can be used to interpolate and rotate + regular lat/lon grids of vector data to native map projection grids. + + The rotate_vector method rotates a vector field from lat/lon + coordinates into map projections coordinates, without doing any + interpolation. + + The readshapefile method can be used to read in data from ESRI + shapefiles. + + The drawgreatcircle method draws great circles on the map. +""" % locals() + +# unsupported projection error message. +_unsupported_projection = ["'%s' is an unsupported projection.\n"] +_unsupported_projection.append("The supported projections are:\n") +_unsupported_projection.append(supported_projections) +_unsupported_projection = ''.join(_unsupported_projection) + +def _validated_ll(param, name, minval, maxval): + param = float(param) + if param > maxval or param < minval: + raise ValueError('%s must be between %f and %f degrees' % + (name, minval, maxval)) + return param + +def _insert_validated(d, param, name, minval, maxval): + if param is not None: + d[name] = _validated_ll(param, name, minval, maxval) + +class Basemap(object): + """ + Class for plotting data on map projections with matplotlib. + See __init__ docstring for details on how to create a class + instance for a given map projection. + + Useful instance variables: + + projection - map projection. 
Print the module variable + "supported_projections" to see a list. + aspect - map aspect ratio (size of y dimension / size of x dimension). + llcrnrlon - longitude of lower left hand corner of the desired map domain. + llcrnrlon - latitude of lower left hand corner of the desired map domain. + urcrnrlon - longitude of upper right hand corner of the desired map domain. + urcrnrlon - latitude of upper right hand corner of the desired map domain. + llcrnrx,llcrnry,urcrnrx,urcrnry - corners of map domain in projection coordinates. + rmajor,rminor - equatorial and polar radii of ellipsoid used (in meters). + resolution - resolution of boundary dataset being used ('c' for crude, + 'l' for low, etc.). If None, no boundary dataset is associated with the + Basemap instance. + srs - a string representing the 'spatial reference system' for the map + projection as defined by PROJ.4. + + Example Usage: + + >>> from mpl_toolkits.basemap import Basemap + >>> from pylab import load, meshgrid, title, arange, show + >>> # read in topo data (on a regular lat/lon grid) + >>> etopo = load('etopo20data.gz') + >>> lons = load('etopo20lons.gz') + >>> lats = load('etopo20lats.gz') + >>> # create Basemap instance for Robinson projection. + >>> m = Basemap(projection='robin',lon_0=0.5*(lons[0]+lons[-1])) + >>> # compute native map projection coordinates for lat/lon grid. + >>> x, y = m(*meshgrid(lons,lats)) + >>> # make filled contour plot. + >>> cs = m.contourf(x,y,etopo,30,cmap=cm.jet) + >>> m.drawcoastlines() # draw coastlines + >>> m.drawmapboundary() # draw a line around the map region + >>> m.drawparallels(arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels + >>> m.drawmeridians(arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians + >>> title('Robinson Projection') # add a title + >>> show() + + [this example (simpletest.py) plus many others can be found in the + examples directory of source distribution. 
The "OO" version of this + example (which does not use pylab) is called "simpletest_oo.py".] + """ + + def __init__(self, llcrnrlon=None, llcrnrlat=None, + urcrnrlon=None, urcrnrlat=None, + width=None, height=None, + projection='cyl', resolution='c', + area_thresh=None, rsphere=6370997.0, + lat_ts=None, + lat_1=None, lat_2=None, + lat_0=None, lon_0=None, + lon_1=None, lon_2=None, + suppress_ticks=True, + satellite_height=35786000, + boundinglat=None, + anchor='C', + ax=None): + # docstring is added after __init__ method definition + + # where to put plot in figure (default is 'C' or center) + self.anchor = anchor + # map projection. + self.projection = projection + + # set up projection parameter dict. + projparams = {} + projparams['proj'] = projection + try: + if rsphere[0] > rsphere[1]: + projparams['a'] = rsphere[0] + projparams['b'] = rsphere[1] + else: + projparams['a'] = rsphere[1] + projparams['b'] = rsphere[0] + except: + if projection == 'tmerc': + # use bR_a instead of R because of obscure bug + # in proj4 for tmerc projection. + projparams['bR_a'] = rsphere + else: + projparams['R'] = rsphere + # set units to meters. + projparams['units']='m' + # check for sane values of lon_0, lat_0, lat_ts, lat_1, lat_2 + _insert_validated(projparams, lat_0, 'lat_0', -90, 90) + _insert_validated(projparams, lat_1, 'lat_1', -90, 90) + _insert_validated(projparams, lat_2, 'lat_2', -90, 90) + _insert_validated(projparams, lat_ts, 'lat_ts', -90, 90) + _insert_validated(projparams, lon_0, 'lon_0', -360, 720) + _insert_validated(projparams, lon_1, 'lon_1', -360, 720) + _insert_validated(projparams, lon_2, 'lon_2', -360, 720) + if projection == 'geos': + projparams['h'] = satellite_height + # check for sane values of projection corners. 
+ using_corners = (None not in [llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat]) + if using_corners: + self.llcrnrlon = _validated_ll(llcrnrlon, 'llcrnrlon', -360, 720) + self.urcrnrlon = _validated_ll(urcrnrlon, 'urcrnrlon', -360, 720) + self.llcrnrlat = _validated_ll(llcrnrlat, 'llcrnrlat', -90, 90) + self.urcrnrlat = _validated_ll(urcrnrlat, 'urcrnrlat', -90, 90) + + # for each of the supported projections, + # compute lat/lon of domain corners + # and set values in projparams dict as needed. + + if projection in ['lcc', 'eqdc', 'aea']: + # if lat_0 is given, but not lat_1, + # set lat_1=lat_0 + if lat_1 is None and lat_0 is not None: + lat_1 = lat_0 + projparams['lat_1'] = lat_1 + if lat_1 is None or lon_0 is None: + raise ValueError('must specify lat_1 or lat_0 and lon_0 for %s basemap (lat_2 is optional)' % _projnames[projection]) + if lat_2 is None: + projparams['lat_2'] = lat_1 + if not using_corners: + if width is None or height is None: + raise ValueError, 'must either specify lat/lon values of corners (llcrnrlon,llcrnrlat,ucrnrlon,urcrnrlat) in degrees or width and height in meters' + if lon_0 is None or lat_0 is None: + raise ValueError, 'must specify lon_0 and lat_0 when using width, height to specify projection region' + llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat = _choosecorners(width,height,**projparams) + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection == 'stere': + if lat_0 is None or lon_0 is None: + raise ValueError, 'must specify lat_0 and lon_0 for Stereographic basemap (lat_ts is optional)' + if not using_corners: + if width is None or height is None: + raise ValueError, 'must either specify lat/lon values of corners (llcrnrlon,llcrnrlat,ucrnrlon,urcrnrlat) in degrees or width and height in meters' + if lon_0 is None or lat_0 is None: + raise ValueError, 'must specify lon_0 and lat_0 when using width, height to specify projection region' + 
llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat = _choosecorners(width,height,**projparams) + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection in ['spstere', 'npstere', + 'splaea', 'nplaea', + 'spaeqd', 'npaeqd']: + if boundinglat is None or lon_0 is None: + raise ValueError('must specify boundinglat and lon_0 for %s basemap' % _projnames[projection]) + if projection[0] == 's': + sgn = -1 + else: + sgn = 1 + rootproj = projection[2:] + projparams['proj'] = rootproj + if rootproj == 'stere': + projparams['lat_ts'] = sgn * 90. + projparams['lat_0'] = sgn * 90. + self.llcrnrlon = lon_0 - sgn*45. + self.urcrnrlon = lon_0 + sgn*135. + proj = pyproj.Proj(projparams) + x,y = proj(lon_0,boundinglat) + lon,self.llcrnrlat = proj(math.sqrt(2.)*y,0.,inverse=True) + self.urcrnrlat = self.llcrnrlat + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % _projnames[projection] + elif projection == 'laea': + if lat_0 is None or lon_0 is None: + raise ValueError, 'must specify lat_0 and lon_0 for Lambert Azimuthal basemap' + if not using_corners: + if width is None or height is None: + raise ValueError, 'must either specify lat/lon values of corners (llcrnrlon,llcrnrlat,ucrnrlon,urcrnrlat) in degrees or width and height in meters' + if lon_0 is None or lat_0 is None: + raise ValueError, 'must specify lon_0 and lat_0 when using width, height to specify projection region' + llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat = _choosecorners(width,height,**projparams) + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection == 'merc': + if lat_ts is None: + raise ValueError, 'must specify lat_ts for Mercator basemap' + # clip plot region to be within -89.99S to 89.99N + # (mercator is singular at poles) + if not using_corners: + llcrnrlon = -180. + llcrnrlat = -90. 
+ urcrnrlon = 180 + urcrnrlat = 90. + if llcrnrlat < -89.99: llcrnrlat = -89.99 + if llcrnrlat > 89.99: llcrnrlat = 89.99 + if urcrnrlat < -89.99: urcrnrlat = -89.99 + if urcrnrlat > 89.99: urcrnrlat = 89.99 + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + elif projection in ['tmerc','gnom','cass','poly'] : + if lat_0 is None or lon_0 is None: + raise ValueError, 'must specify lat_0 and lon_0 for Transverse Mercator, Gnomonic, Cassini-Soldnerr Polyconic basemap' + if not using_corners: + if width is None or height is None: + raise ValueError, 'must either specify lat/lon values of corners (llcrnrlon,llcrnrlat,ucrnrlon,urcrnrlat) in degrees or width and height in meters' + if lon_0 is None or lat_0 is None: + raise ValueError, 'must specify lon_0 and lat_0 when using width, height to specify projection region' + llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat = _choosecorners(width,height,**projparams) + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection == 'ortho': + if not projparams.has_key('R'): + raise ValueError, 'orthographic projection only works for perfect spheres - not ellipsoids' + if lat_0 is None or lon_0 is None: + raise ValueError, 'must specify lat_0 and lon_0 for Orthographic basemap' + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + if not using_corners: + llcrnrlon = -180. + llcrnrlat = -90. + urcrnrlon = 180 + urcrnrlat = 90. + self._fulldisk = True + else: + self._fulldisk = False + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + # FIXME: won't work for points exactly on equator?? 
+ if npy.abs(lat_0) < 1.e-2: lat_0 = 1.e-2 + projparams['lat_0'] = lat_0 + elif projection == 'geos': + if lon_0 is None: + raise ValueError, 'must specify lon_0 for Geostationary basemap' + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + if not using_corners: + llcrnrlon = -180. + llcrnrlat = -90. + urcrnrlon = 180 + urcrnrlat = 90. + self._fulldisk = True + else: + self._fulldisk = False + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection in ['moll','robin','sinu']: + if lon_0 is None: + raise ValueError, 'must specify lon_0 for Robinson, Mollweide, or Sinusoidal basemap' + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + llcrnrlon = -180. + llcrnrlat = -90. + urcrnrlon = 180 + urcrnrlat = 90. + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection == 'omerc': + if lat_1 is None or lon_1 is None or lat_2 is None or lon_2 is None: + raise ValueError, 'must specify lat_1,lon_1 and lat_2,lon_2 for Oblique Mercator basemap' + projparams['lat_1'] = lat_1 + projparams['lon_1'] = lon_1 + projparams['lat_2'] = lat_2 + projparams['lon_2'] = lon_2 + if not using_corners: + raise ValueError, 'cannot specify map region with width and height keywords for this projection, please specify lat/lon values of corners' + elif projection == 'aeqd': + if lat_0 is None or lon_0 is None: + raise ValueError, 'must specify lat_0 and lon_0 for Azimuthal Equidistant basemap' + if not using_corners: + if width is None or height is None: + raise ValueError, 'must either specify lat/lon values of corners (llcrnrlon,llcrnrlat,ucrnrlon,urcrnrlat) in degrees or width and height in meters' + if lon_0 is None or lat_0 is None: + raise ValueError, 'must specify lon_0 and lat_0 
when using width, height to specify projection region' + llcrnrlon,llcrnrlat,urcrnrlon,urcrnrlat = _choosecorners(width,height,**projparams) + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + elif projection == 'mill': + if not using_corners: + llcrnrlon = -180. + llcrnrlat = -90. + urcrnrlon = 180 + urcrnrlat = 90. + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + elif projection == 'cyl': + if not using_corners: + llcrnrlon = -180. + llcrnrlat = -90. + urcrnrlon = 180 + urcrnrlat = 90. + self.llcrnrlon = llcrnrlon; self.llcrnrlat = llcrnrlat + self.urcrnrlon = urcrnrlon; self.urcrnrlat = urcrnrlat + if width is not None or height is not None: + print 'warning: width and height keywords ignored for %s projection' % self.projection + else: + raise ValueError(_unsupported_projection % projection) + + # initialize proj4 + proj = Proj(projparams,self.llcrnrlon,self.llcrnrlat,self.urcrnrlon,self.urcrnrlat) + + # make sure axis ticks are suppressed. + self.noticks = suppress_ticks + + # make Proj instance a Basemap instance variable. + self.projtran = proj + # copy some Proj attributes. + atts = ['rmajor','rminor','esq','flattening','ellipsoid','projparams'] + for att in atts: + self.__dict__[att] = proj.__dict__[att] + # these only exist for geostationary projection. + if hasattr(proj,'_width'): + self.__dict__['_width'] = proj.__dict__['_width'] + if hasattr(proj,'_height'): + self.__dict__['_height'] = proj.__dict__['_height'] + # spatial reference string (useful for georeferencing output + # images with gdal_translate). 
+ if hasattr(self,'_proj4'): + self.srs = proj._proj4.srs + else: + pjargs = [] + for key,value in self.projparams.iteritems(): + # 'cyl' projection translates to 'eqc' in PROJ.4 + if projection == 'cyl' and key == 'proj': + value = 'eqc' + # ignore x_0 and y_0 settings for 'cyl' projection + # (they are not consistent with what PROJ.4 uses) + elif projection == 'cyl' and key in ['x_0','y_0']: + continue + pjargs.append('+'+key+"="+str(value)+' ') + self.srs = ''.join(pjargs) + # set instance variables defining map region. + self.xmin = proj.xmin + self.xmax = proj.xmax + self.ymin = proj.ymin + self.ymax = proj.ymax + if projection == 'cyl': + self.aspect = (self.urcrnrlat-self.llcrnrlat)/(self.urcrnrlon-self.llcrnrlon) + else: + self.aspect = (proj.ymax-proj.ymin)/(proj.xmax-proj.xmin) + self.llcrnrx = proj.llcrnrx + self.llcrnry = proj.llcrnry + self.urcrnrx = proj.urcrnrx + self.urcrnry = proj.urcrnry + + # set min/max lats for projection domain. + if projection in ['mill','cyl','merc']: + self.latmin = self.llcrnrlat + self.latmax = self.urcrnrlat + elif projection in ['ortho','geos','moll','robin','sinu']: + self.latmin = -90. + self.latmax = 90. + else: + lons, lats = self.makegrid(101,101) + self.latmin = lats.min() + self.latmax = lats.max() + + # if ax == None, pylab.gca may be used. + self.ax = ax + self.lsmask = None + + # set defaults for area_thresh. + self.resolution = resolution + if area_thresh is None and resolution is not None: + if resolution == 'c': + area_thresh = 10000. + elif resolution == 'l': + area_thresh = 1000. + elif resolution == 'i': + area_thresh = 100. + elif resolution == 'h': + area_thresh = 10. + elif resolution == 'f': + area_thresh = 1. 
+ else: + raise ValueError, "boundary resolution must be one of 'c','l','i','h' or 'f'" + self.area_thresh = area_thresh + # define map boundary polygon (in lat/lon coordinates) + self._boundarypolyll, self._boundarypolyxy = self._getmapboundary() + # read in coastline polygons, only keeping those that + # intersect map boundary polygon. + if self.resolution is not None: + self.coastsegs, self.coastpolygontypes = self._readboundarydata('gshhs') + # reformat for use in matplotlib.patches.Polygon. + self.coastpolygons = [] + # also, split coastline segments that jump across entire plot. + coastsegs = [] + for seg in self.coastsegs: + x, y = zip(*seg) + self.coastpolygons.append((x,y)) + x = npy.array(x,npy.float64); y = npy.array(y,npy.float64) + xd = (x[1:]-x[0:-1])**2 + yd = (y[1:]-y[0:-1])**2 + dist = npy.sqrt(xd+yd) + split = dist > 5000000. + if npy.sum(split) and self.projection not in ['merc','cyl','mill']: + ind = (npy.compress(split,squeeze(split*npy.indices(xd.shape)))+1).tolist() + iprev = 0 + ind.append(len(xd)) + for i in ind: + # don't add empty lists. + if len(range(iprev,i)): + coastsegs.append(zip(x[iprev:i],y[iprev:i])) + iprev = i + else: + coastsegs.append(seg) + self.coastsegs = coastsegs + # set __init__'s docstring + __init__.__doc__ = _Basemap_init_doc + + def __call__(self,x,y,inverse=False): + """ + Calling a Basemap class instance with the arguments lon, lat will + convert lon/lat (in degrees) to x/y native map projection + coordinates (in meters). If optional keyword 'inverse' is + True (default is False), the inverse transformation from x/y + to lon/lat is performed. + + For cylindrical equidistant projection ('cyl'), this + does nothing (i.e. x,y == lon,lat). + + For non-cylindrical projections, the inverse transformation + always returns longitudes between -180 and 180 degrees. 
For + cylindrical projections (self.projection == 'cyl','mill' or 'merc') + the inverse transformation will return longitudes between + self.llcrnrlon and self.llcrnrlat. + + input arguments lon, lat can be either scalar floats or N arrays. + """ + return self.projtran(x,y,inverse=inverse) + + def makegrid(self,nx,ny,returnxy=False): + """ + return arrays of shape (ny,nx) containing lon,lat coordinates of + an equally spaced native projection grid. + if returnxy = True, the x,y values of the grid are returned also. + """ + return self.projtran.makegrid(nx,ny,returnxy=returnxy) + + def _readboundarydata(self,name): + """ + read boundary data, clip to map projection region. + """ + msg = dedent(""" + Unable to open boundary dataset file. Only the 'crude', 'low', + 'intermediate' and 'high' resolution datasets are installed by default. + If you are requesting a 'full' resolution dataset, you may need to + download and install those files separately + (see the basemap README for details).""") + try: + bdatfile = open(os.path.join(basemap_datadir,name+'_'+self.resolution+'.dat'),'rb') + bdatmetafile = open(os.path.join(basemap_datadir,name+'meta_'+self.resolution+'.dat'),'r') + except: + raise IOError, msg + polygons = [] + polygon_types = [] + # coastlines are polygons, other boundaries are line segments. + if name == 'gshhs': + Shape = _geos.Polygon + else: + Shape = _geos.LineString + # see if map projection region polygon contains a pole. + NPole = _geos.Point(self(0.,90.)) + SPole = _geos.Point(self(0.,-90.)) + boundarypolyxy = self._boundarypolyxy + boundarypolyll = self._boundarypolyll + hasNP = NPole.within(boundarypolyxy) + hasSP = SPole.within(boundarypolyxy) + containsPole = hasNP or hasSP + # these projections cannot cross pole. 
+ if containsPole and\ + self.projection in ['tmerc','cass','omerc','merc','mill','cyl','robin','moll','sinu','geos']: + raise ValueError('%s projection cannot cross pole'%(self.projection)) + # make sure orthographic projection has containsPole=True + # we will compute the intersections in stereographic + # coordinates, then transform to orthographic. + if self.projection == 'ortho' and name == 'gshhs': + containsPole = True + lon_0=self.projparams['lon_0'] + lat_0=self.projparams['lat_0'] + re = self.projparams['R'] + # center of stereographic projection restricted to be + # nearest one of 6 points on the sphere (every 90 deg lat/lon). + lon0 = 90.*(npy.around(lon_0/90.)) + lat0 = 90.*(npy.around(lat_0/90.)) + if npy.abs(int(lat0)) == 90: lon0=0. + maptran = pyproj.Proj(proj='stere',lon_0=lon0,lat_0=lat0,R=re) + # boundary polygon for orthographic projection + # in stereographic coorindates. + b = self._boundarypolyll.boundary + blons = b[:,0]; blats = b[:,1] + b[:,0], b[:,1] = maptran(blons, blats) + boundarypolyxy = _geos.Polygon(b) + for line in bdatmetafile: + linesplit = line.split() + area = float(linesplit[1]) + south = float(linesplit[3]) + north = float(linesplit[4]) + if area < 0.: area = 1.e30 + useit = self.latmax>=south and self.latmin<=north and area>self.area_thresh + if useit: + type = int(linesplit[0]) + npts = int(linesplit[2]) + offsetbytes = int(linesplit[5]) + bytecount = int(linesplit[6]) + bdatfile.seek(offsetbytes,0) + # read in binary string convert into an npts by 2 + # numpy array (first column is lons, second is lats). + polystring = bdatfile.read(bytecount) + # binary data is little endian. + b = npy.array(npy.fromstring(polystring,dtype='<f4'),'f8') + b.shape = (npts,2) + b2 = b.copy() + # if map boundary polygon is a valid one in lat/lon + # coordinates (i.e. 
it does not contain either pole), + # the intersections of the boundary geometries + # and the map projection region can be computed before + # transforming the boundary geometry to map projection + # coordinates (this saves time, especially for small map + # regions and high-resolution boundary geometries). + if not containsPole: + # close Antarctica. + if name == 'gshhs' and south < -68 and area > 10000: + lons = b[:,0] + lats = b[:,1] + lons2 = lons[:-2][::-1] + lats2 = lats[:-2][::-1] + lons1 = lons2 - 360. + lons3 = lons2 + 360. + lons = lons1.tolist()+lons2.tolist()+lons3.tolist() + lats = lats2.tolist()+lats2.tolist()+lats2.tolist() + lonstart,latstart = lons[0], lats[0] + lonend,latend = lons[-1], lats[-1] + lons.insert(0,lonstart) + lats.insert(0,-90.) + lons.append(lonend) + lats.append(-90.) + b = npy.empty((len(lons),2),npy.float64) + b[:,0] = lons; b[:,1] = lats + poly = _geos.Polygon(b) + antart = True + else: + poly = Shape(b) + antart = False + # create duplicate polygons shifted by -360 and +360 + # (so as to properly treat polygons that cross + # Greenwich meridian). + if not antart: + b2[:,0] = b[:,0]-360 + poly1 = Shape(b2) + b2[:,0] = b[:,0]+360 + poly2 = Shape(b2) + polys = [poly1,poly,poly2] + else: # Antartica already extends from -360 to +720. + polys = [poly] + for poly in polys: + # if polygon instersects map projection + # region, process it. + if poly.intersects(boundarypolyll): + # if geometry intersection calculation fails, + # just move on. + try: + geoms = poly.intersection(boundarypolyll) + except: + continue + # iterate over geometries in intersection. + for psub in geoms: + # only coastlines are polygons, + # which have a 'boundary' attribute. + # otherwise, use 'coords' attribute + # to extract coordinates. + b = psub.boundary + blons = b[:,0]; blats = b[:,1] + # transformation from lat/lon to + # map projection coordinates. 
+ bx, by = self(blons, blats) + polygons.append(zip(bx,by)) + polygon_types.append(type) + # if map boundary polygon is not valid in lat/lon + # coordinates, compute intersection between map + # projection region and boundary geometries in map + # projection coordinates. + else: + # transform coordinates from lat/lon + # to map projection coordinates. + # special case for ortho, compute coastline polygon + # vertices in stereographic coords. + if name == 'gshhs' and self.projection == 'ortho': + b[:,0], b[:,1] = maptran(b[:,0], b[:,1]) + else: + b[:,0], b[:,1] = self(b[:,0], b[:,1]) + goodmask = npy.logical_and(b[:,0]<1.e20,b[:,1]<1.e20) + # if less than two points are valid in + # map proj coords, skip this geometry. + if npy.sum(goodmask) <= 1: continue + if name != 'gshhs': + # if not a polygon, + # just remove parts of geometry that are undefined + # in this map projection. + bx = npy.compress(goodmask, b[:,0]) + by = npy.compress(goodmask, b[:,1]) + # for orthographic projection, all points + # outside map projection region are eliminated + # with the above step, so we're done. + if self.projection == 'ortho': + polygons.append(zip(bx,by)) + polygon_types.append(type) + continue + # create a GEOS geometry object. + poly = Shape(b) + # if geometry instersects map projection + # region, and doesn't have any invalid points, process it. + if goodmask.all() and poly.intersects(boundarypolyxy): + # if geometry intersection calculation fails, + # just move on. + try: + geoms = poly.intersection(boundarypolyxy) + except: + continue + # iterate over geometries in intersection. + for psub in geoms: + b = psub.boundary + # if projection == 'ortho', + # transform polygon from stereographic + # to orthographic coordinates. + if self.projection == 'ortho': + # if coastline polygon covers more than 99% + # of map region for fulldisk projection, + # it's probably bogus, so skip it. 
+ areafrac = psub.area()/boundarypolyxy.area() + if name == 'gshhs' and\ + self._fulldisk and\ + areafrac > 0.99: continue + # inverse transform from stereographic + # to lat/lon. + b[:,0], b[:,1] = maptran(b[:,0], b[:,1], inverse=True) + # orthographic. + b[:,0], b[:,1]= self(b[:,0], b[:,1]) + polygons.append(zip(b[:,0],b[:,1])) + polygon_types.append(type) + return polygons, polygon_types + + def _getmapboundary(self): + """ + create map boundary polygon (in lat/lon and x/y coordinates) + """ + nx = 100 + ny = 100 + maptran = self + if self.projection in ['ortho','geos']: + # circular region. + thetas = linspace(0.,2.*npy.pi,2*nx*ny)[:-1] + if self.projection == 'ortho': + rminor = self.rmajor + rmajor = self.rmajor + else: + rminor = self._height + rmajor = self._width + x = rmajor*npy.cos(thetas) + rmajor + y = rminor*npy.sin(thetas) + rminor + b = npy.empty((len(x),2),npy.float64) + b[:,0]=x; b[:,1]=y + boundaryxy = _geos.Polygon(b) + # compute proj instance for full disk, if necessary. + if not self._fulldisk: + projparms = self.projparams + del projparms['x_0'] + del projparms['y_0'] + if self.projection == 'ortho': + llcrnrx = -self.rmajor + llcrnry = -self.rmajor + urcrnrx = -llcrnrx + urcrnry = -llcrnry + else: + llcrnrx = -self._width + llcrnry = -self._height + urcrnrx = -llcrnrx + urcrnry = -llcrnry + projparms['x_0']=-llcrnrx + projparms['y_0']=-llcrnry + maptran = pyproj.Proj(projparms) + elif self.projection in ['moll','robin','sinu']: + # quasi-elliptical region. + lon_0 = self.projparams['lon_0'] + # left side + lats1 = linspace(-89.9,89.9,ny).tolist() + lons1 = len(lats1)*[lon_0-179.9] + # top. + lons2 = linspace(lon_0-179.9,lon_0+179.9,nx).tolist() + lats2 = len(lons2)*[89.9] + # right side + lats3 = linspace(89.9,-89.9,ny).tolist() + lons3 = len(lats3)*[lon_0+179.9] + # bottom. 
+ lons4 = linspace(lon_0+179.9,lon_0-179.9,nx).tolist() + lats4 = len(lons4)*[-89.9] + lons = npy.array(lons1+lons2+lons3+lons4,npy.float64) + lats = npy.array(lats1+lats2+lats3+lats4,npy.float64) + x, y = maptran(lons,lats) + b = npy.empty((len(x),2),npy.float64) + b[:,0]=x; b[:,1]=y + boundaryxy = _geos.Polygon(b) + else: # all other projections are rectangular. + # left side (x = xmin, ymin <= y <= ymax) + yy = linspace(self.ymin, self.ymax, ny)[:-1] + x = len(yy)*[self.xmin]; y = yy.tolist() + # top (y = ymax, xmin <= x <= xmax) + xx = npy.linspace(self.xmin, self.xmax, nx)[:-1] + x = x + xx.tolist() + y = y + len(xx)*[self.ymax] + # right side (x = xmax, ymin <= y <= ymax) + yy = linspace(self.ymax, self.ymin, ny)[:-1] + x = x + len(yy)*[self.xmax]; y = y + yy.tolist() + # bottom (y = ymin, xmin <= x <= xmax) + xx = linspace(self.xmax, self.xmin, nx)[:-1] + x = x + xx.tolist() + y = y + len(xx)*[self.ymin] + x = npy.array(x,npy.float64) + y = npy.array(y,npy.float64) + b = npy.empty((4,2),npy.float64) + b[:,0]=[self.xmin,self.xmin,self.xmax,self.xmax] + b[:,1]=[self.ymin,self.ymax,self.ymax,self.ymin] + boundaryxy = _geos.Polygon(b) + if self.projection in ['mill','merc','cyl']: + # make sure map boundary doesn't quite include pole. + if self.urcrnrlat > 89.9999: + urcrnrlat = 89.9999 + else: + urcrnrlat = self.urcrnrlat + if self.llcrnrlat < -89.9999: + llcrnrlat = -89.9999 + else: + llcrnrlat = self.llcrnrlat + lons = [self.llcrnrlon, self.llcrnrlon, self.urcrnrlon, self.urcrnrlon] + lats = [llcrnrlat, urcrnrlat, urcrnrlat, llcrnrlat] + x, y = self(lons, lats) + b = npy.empty((len(x),2),npy.float64) + b[:,0]=x; b[:,1]=y + boundaryxy = _geos.Polygon(b) + else: + if self.projection not in ['moll','robin','sinu']: + lons, lats = maptran(x,y,inverse=True) + # fix lons so there are no jumps. + n = 1 + lonprev = lons[0] + for lon,lat in zip(lons[1:],lats[1:]): + if npy.abs(lon-lonprev) > 90.: + if lonprev < 0: + lon = lon - 360. 
+ else: + lon = lon + 360 + lons[n] = lon + lonprev = lon + n = n + 1 + b = npy.empty((len(lons),2),npy.float64) + b[:,0]=lons; b[:,1]=lats + boundaryll = _geos.Polygon(b) + return boundaryll, boundaryxy + + + def drawmapboundary(self,color='k',linewidth=1.0,fill_color=None,\ + zorder=None,ax=None): + """ + draw boundary around map projection region, optionally + filling interior of region. + + linewidth - line width for boundary (default 1.) + color - color of boundary line (default black) + fill_color - fill the map region background with this + color (default is no fill or fill with axis background color). + zorder - sets the zorder for filling map background + (default 0). + ax - axes instance to use (default None, use default axes + instance). + """ + # get current axes instance (if none specified). + if ax is None and... [truncated message content] |
From: <js...@us...> - 2008-01-10 14:31:50
|
Revision: 4851 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4851&view=rev Author: jswhit Date: 2008-01-10 06:31:37 -0800 (Thu, 10 Jan 2008) Log Message: ----------- re-enable python 2.3 support Modified Paths: -------------- trunk/toolkits/basemap/Changelog trunk/toolkits/basemap/README Modified: trunk/toolkits/basemap/Changelog =================================================================== --- trunk/toolkits/basemap/Changelog 2008-01-10 14:05:56 UTC (rev 4850) +++ trunk/toolkits/basemap/Changelog 2008-01-10 14:31:37 UTC (rev 4851) @@ -1,10 +1,11 @@ -version 0.9.9.2 +version 0.99 * Now lives in mpl_toolkits.basemap. Instead of 'from matplotlib.toolkits.basemap import Basemap', use 'from mpl_toolkits.basemap import Basemap'. All examples changed. Uses matplotlib mpl_toolkits namespace package, so basemap can now be installed - if matplotlib is installed as an egg. + if matplotlib is installed as an egg. Python 2.3 + support re-enabled. version 0.9.9.1 (svn revision 4808) * require python 2.4 (really only needed for building). Once namespace packages are re-enabled in matplotlib, Modified: trunk/toolkits/basemap/README =================================================================== --- trunk/toolkits/basemap/README 2008-01-10 14:05:56 UTC (rev 4850) +++ trunk/toolkits/basemap/README 2008-01-10 14:31:37 UTC (rev 4851) @@ -5,9 +5,9 @@ **Requirements** -python 2.4 (or higher) +python 2.3 (or higher) -matplotlib 0.90 (or higher) +matplotlib 0.98 (or higher) numpy 1.0 (or higher) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2008-01-11 23:32:20
|
Revision: 4860 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4860&view=rev Author: jswhit Date: 2008-01-11 15:32:10 -0800 (Fri, 11 Jan 2008) Log Message: ----------- fixed installation of fullres data for new mpl_toolkits namespace Modified Paths: -------------- trunk/toolkits/basemap/setup-data.py trunk/toolkits/basemap/setupegg-data.py Modified: trunk/toolkits/basemap/setup-data.py =================================================================== --- trunk/toolkits/basemap/setup-data.py 2008-01-11 21:08:45 UTC (rev 4859) +++ trunk/toolkits/basemap/setup-data.py 2008-01-11 23:32:10 UTC (rev 4860) @@ -3,21 +3,21 @@ if major==2 and minor1<=3: # setuptools monkeypatches distutils.core.Distribution to support # package_data - #try: import setuptools - #except ImportError: - # raise SystemExit(""" -#matplotlib requires setuptools for installation. Please download -#http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if -#you are doing a system wide install) to install the proper version of -#setuptools for your system""") + try: import setuptools + except ImportError: + raise SystemExit(""" +matplotlib requires setuptools for installation. 
Please download +http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if +you are doing a system wide install) to install the proper version of +setuptools for your system""") raise SystemExit("""The basemap toolkit requires python 2.4.""") from distutils.core import setup -packages = ['matplotlib.toolkits.basemap.data'] +packages = ['mpl_toolkits.basemap.data'] package_dirs = {'':'lib'} boundaryfiles = glob.glob("lib/matplotlib/toolkits/basemap/data/*_f.dat") basemap_datafiles = [os.path.basename(bfile) for bfile in boundaryfiles] -package_data = {'matplotlib.toolkits.basemap.data':basemap_datafiles} +package_data = {'mpl_toolkits.basemap.data':basemap_datafiles} setup( name = "basemap-data-fullres", version = "0.9.7", Modified: trunk/toolkits/basemap/setupegg-data.py =================================================================== --- trunk/toolkits/basemap/setupegg-data.py 2008-01-11 21:08:45 UTC (rev 4859) +++ trunk/toolkits/basemap/setupegg-data.py 2008-01-11 23:32:10 UTC (rev 4860) @@ -3,4 +3,6 @@ """ from setuptools import setup -execfile('setup-data.py') +execfile('setup-data.py', + {'additional_params' : + {'namespace_packages' : ['mpl_toolkits']}}) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2008-01-11 23:59:13
|
Revision: 4861 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4861&view=rev Author: jswhit Date: 2008-01-11 15:59:04 -0800 (Fri, 11 Jan 2008) Log Message: ----------- remove separate data package (won't work unless nested namespace packages are used anyway) Modified Paths: -------------- trunk/toolkits/basemap/MANIFEST.in Removed Paths: ------------- trunk/toolkits/basemap/setup-data.py trunk/toolkits/basemap/setupegg-data.py Modified: trunk/toolkits/basemap/MANIFEST.in =================================================================== --- trunk/toolkits/basemap/MANIFEST.in 2008-01-11 23:32:10 UTC (rev 4860) +++ trunk/toolkits/basemap/MANIFEST.in 2008-01-11 23:59:04 UTC (rev 4861) @@ -84,54 +84,4 @@ recursive-include lib/httplib2 * recursive-include lib/dbflib * recursive-include lib/shapelib * -include lib/mpl_toolkits/basemap/data/5minmask.bin -include lib/mpl_toolkits/basemap/data/GL27 -include lib/mpl_toolkits/basemap/data/countries_c.dat -include lib/mpl_toolkits/basemap/data/countries_h.dat -include lib/mpl_toolkits/basemap/data/countries_i.dat -include lib/mpl_toolkits/basemap/data/countries_l.dat -include lib/mpl_toolkits/basemap/data/countriesmeta_c.dat -include lib/mpl_toolkits/basemap/data/countriesmeta_h.dat -include lib/mpl_toolkits/basemap/data/countriesmeta_i.dat -include lib/mpl_toolkits/basemap/data/countriesmeta_l.dat -include lib/mpl_toolkits/basemap/data/epsg -include lib/mpl_toolkits/basemap/data/esri -include lib/mpl_toolkits/basemap/data/esri.extra -include lib/mpl_toolkits/basemap/data/gshhs_c.dat -include lib/mpl_toolkits/basemap/data/gshhs_h.dat -include lib/mpl_toolkits/basemap/data/gshhs_i.dat -include lib/mpl_toolkits/basemap/data/gshhs_l.dat -include lib/mpl_toolkits/basemap/data/gshhsmeta_c.dat -include lib/mpl_toolkits/basemap/data/gshhsmeta_h.dat -include lib/mpl_toolkits/basemap/data/gshhsmeta_i.dat -include lib/mpl_toolkits/basemap/data/gshhsmeta_l.dat -include lib/mpl_toolkits/basemap/data/nad.lst -include 
lib/mpl_toolkits/basemap/data/nad27 -include lib/mpl_toolkits/basemap/data/nad83 -include lib/mpl_toolkits/basemap/data/ntv2_out.dist -include lib/mpl_toolkits/basemap/data/other.extra -include lib/mpl_toolkits/basemap/data/pj_out27.dist -include lib/mpl_toolkits/basemap/data/pj_out83.dist -include lib/mpl_toolkits/basemap/data/proj_def.dat -include lib/mpl_toolkits/basemap/data/rivers_c.dat -include lib/mpl_toolkits/basemap/data/rivers_h.dat -include lib/mpl_toolkits/basemap/data/rivers_i.dat -include lib/mpl_toolkits/basemap/data/rivers_l.dat -include lib/mpl_toolkits/basemap/data/riversmeta_c.dat -include lib/mpl_toolkits/basemap/data/riversmeta_h.dat -include lib/mpl_toolkits/basemap/data/riversmeta_i.dat -include lib/mpl_toolkits/basemap/data/riversmeta_l.dat -include lib/mpl_toolkits/basemap/data/states_c.dat -include lib/mpl_toolkits/basemap/data/states_h.dat -include lib/mpl_toolkits/basemap/data/states_i.dat -include lib/mpl_toolkits/basemap/data/states_l.dat -include lib/mpl_toolkits/basemap/data/statesmeta_c.dat -include lib/mpl_toolkits/basemap/data/statesmeta_h.dat -include lib/mpl_toolkits/basemap/data/statesmeta_i.dat -include lib/mpl_toolkits/basemap/data/statesmeta_l.dat -include lib/mpl_toolkits/basemap/data/td_out.dist -include lib/mpl_toolkits/basemap/data/test27 -include lib/mpl_toolkits/basemap/data/test83 -include lib/mpl_toolkits/basemap/data/testntv2 -include lib/mpl_toolkits/basemap/data/testvarious -include lib/mpl_toolkits/basemap/data/world +recursive-include lib/mpl_toolkits/basemap/data * Deleted: trunk/toolkits/basemap/setup-data.py =================================================================== --- trunk/toolkits/basemap/setup-data.py 2008-01-11 23:32:10 UTC (rev 4860) +++ trunk/toolkits/basemap/setup-data.py 2008-01-11 23:59:04 UTC (rev 4861) @@ -1,41 +0,0 @@ -import sys, glob, os -major, minor1, minor2, s, tmp = sys.version_info -if major==2 and minor1<=3: - # setuptools monkeypatches distutils.core.Distribution to support - # 
package_data - try: import setuptools - except ImportError: - raise SystemExit(""" -matplotlib requires setuptools for installation. Please download -http://peak.telecommunity.com/dist/ez_setup.py and run it (as su if -you are doing a system wide install) to install the proper version of -setuptools for your system""") - raise SystemExit("""The basemap toolkit requires python 2.4.""") -from distutils.core import setup - -packages = ['mpl_toolkits.basemap.data'] -package_dirs = {'':'lib'} -boundaryfiles = glob.glob("lib/matplotlib/toolkits/basemap/data/*_f.dat") -basemap_datafiles = [os.path.basename(bfile) for bfile in boundaryfiles] -package_data = {'mpl_toolkits.basemap.data':basemap_datafiles} -setup( - name = "basemap-data-fullres", - version = "0.9.7", - description = "full-resolution boundary data for basemap", - url = "http://matplotlib.sourceforge.net/toolkits.html", - download_url = "http://sourceforge.net/projects/matplotlib", - author = "Jeff Whitaker", - author_email = "jef...@no...", - platforms = ["any"], - license = "OSI Approved", - keywords = ["python","plotting","plots","graphs","charts","GIS","mapping","map projections","maps"], - classifiers = ["Development Status :: 4 - Beta", - "Intended Audience :: Science/Research", - "License :: OSI Approved", - "Topic :: Scientific/Engineering :: Visualization", - "Topic :: Software Development :: Libraries :: Python Modules", - "Operating System :: OS Independent"], - packages = packages, - package_dir = package_dirs, - package_data = package_data - ) Deleted: trunk/toolkits/basemap/setupegg-data.py =================================================================== --- trunk/toolkits/basemap/setupegg-data.py 2008-01-11 23:32:10 UTC (rev 4860) +++ trunk/toolkits/basemap/setupegg-data.py 2008-01-11 23:59:04 UTC (rev 4861) @@ -1,8 +0,0 @@ -""" -Poor man's setuptools script... 
-""" - -from setuptools import setup -execfile('setup-data.py', - {'additional_params' : - {'namespace_packages' : ['mpl_toolkits']}}) This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |
From: <js...@us...> - 2008-01-26 13:17:22
|
Revision: 4899 http://matplotlib.svn.sourceforge.net/matplotlib/?rev=4899&view=rev Author: jswhit Date: 2008-01-26 05:17:19 -0800 (Sat, 26 Jan 2008) Log Message: ----------- adjust geos and ortho corner specification. Modified Paths: -------------- trunk/toolkits/basemap/examples/geos_demo_3.py trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py Modified: trunk/toolkits/basemap/examples/geos_demo_3.py =================================================================== --- trunk/toolkits/basemap/examples/geos_demo_3.py 2008-01-26 13:09:36 UTC (rev 4898) +++ trunk/toolkits/basemap/examples/geos_demo_3.py 2008-01-26 13:17:19 UTC (rev 4899) @@ -7,7 +7,7 @@ m1 = Basemap(projection='geos',lon_0=lon_0,rsphere=(6378137.00,6356752.3142),resolution=None) ax = fig.add_axes([0.1,0.1,0.8,0.8],axisbg='k') # plot just upper right quadrant. -m = Basemap(projection='geos',lon_0=lon_0,rsphere=(6378137.00,6356752.3142),resolution='l',llcrnrx=m1.urcrnrx/2.,llcrnry=m1.urcrnry/2.,urcrnrx=m1.urcrnrx,urcrnry=m1.urcrnry) +m = Basemap(projection='geos',lon_0=lon_0,rsphere=(6378137.00,6356752.3142),resolution='l',llcrnrx=0.,llcrnry=0.,urcrnrx=m1.urcrnrx/2.,urcrnry=m1.urcrnry/2.) print m.projparams m.drawcoastlines() m.drawmapboundary(fill_color='aqua') @@ -23,7 +23,7 @@ m1 = Basemap(projection='ortho',lon_0=lon_0,lat_0=10,resolution=None) ax = fig.add_axes([0.1,0.1,0.8,0.8],axisbg='k') # plot just upper right quadrant. -m = Basemap(projection='ortho',lon_0=lon_0,lat_0=10,resolution='l',llcrnrx=m1.urcrnrx/2.,llcrnry=m1.urcrnry/2.,urcrnrx=m1.urcrnrx,urcrnry=m1.urcrnry) +m = Basemap(projection='ortho',lon_0=lon_0,lat_0=10,resolution='l',llcrnrx=0.,llcrnry=0.,urcrnrx=m1.urcrnrx/2.,urcrnry=m1.urcrnry/2.) 
print m.projparams m.drawcoastlines() m.drawmapboundary(fill_color='aqua') Modified: trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py =================================================================== --- trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py 2008-01-26 13:09:36 UTC (rev 4898) +++ trunk/toolkits/basemap/lib/mpl_toolkits/basemap/basemap.py 2008-01-26 13:17:19 UTC (rev 4899) @@ -145,8 +145,9 @@ corners or width and height must be specified by the user. For 'ortho' and 'geos', the lat/lon values of the corners may be specified, or the x/y values of the corners (llcrnrx,llcrnry,urcrnrx,urcrnry) in the - coordinate system of the global projection. If the corners are not - specified, the entire globe is plotted. + coordinate system of the global projection (with x=0,y=0 at the center + of the global projection). If the corners are not specified, + the entire globe is plotted. resolution - resolution of boundary database to use. Can be 'c' (crude), 'l' (low), 'i' (intermediate), 'h' (high), 'f' (full) or None. @@ -612,10 +613,10 @@ self.aspect = (proj.ymax-proj.ymin)/(proj.xmax-proj.xmin) if projection in ['geos','ortho'] and \ None not in [llcrnrx,llcrnry,urcrnrx,urcrnry]: - self.llcrnrx = llcrnrx - self.llcrnry = llcrnry - self.urcrnrx = urcrnrx - self.urcrnry = urcrnry + self.llcrnrx = llcrnrx+0.5*proj.xmax + self.llcrnry = llcrnry+0.5*proj.ymax + self.urcrnrx = urcrnrx+0.5*proj.xmax + self.urcrnry = urcrnry+0.5*proj.ymax self._fulldisk = False else: self.llcrnrx = proj.llcrnrx This was sent by the SourceForge.net collaborative development platform, the world's largest Open Source development site. |