From scipy-svn at scipy.org Wed May 2 04:56:01 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 2 May 2007 03:56:01 -0500 (CDT) Subject: [Scipy-svn] r2951 - in trunk/Lib/sparse: . tests Message-ID: <20070502085601.52A6939C0B6@new.scipy.org> Author: stefan Date: 2007-05-02 03:55:39 -0500 (Wed, 02 May 2007) New Revision: 2951 Modified: trunk/Lib/sparse/sparse.py trunk/Lib/sparse/tests/test_sparse.py Log: Fix multiplication by zero in lil_matrix. Modified: trunk/Lib/sparse/sparse.py =================================================================== --- trunk/Lib/sparse/sparse.py 2007-04-30 22:15:08 UTC (rev 2950) +++ trunk/Lib/sparse/sparse.py 2007-05-02 08:55:39 UTC (rev 2951) @@ -2415,21 +2415,21 @@ self._insertat3(row, data, j, xx) else: raise ValueError, "invalid index value: %s" % str((i, j)) - def __mul__(self, other): # self * other if isscalarlike(other): - new = self.copy() if other == 0: # Multiply by zero: return the zero matrix - return new - # Multiply this scalar by every element. - new.data = numpy.array([[val*other for val in rowvals] for rowvals in new.data], dtype=object) + new = lil_matrix(shape=self.shape, dtype=self.dtype) + else: + new = self.copy() + # Multiply this scalar by every element. + new.data = numpy.array([[val*other for val in rowvals] for + rowvals in new.data], dtype=object) return new else: return self.dot(other) - def copy(self): new = lil_matrix(self.shape, dtype=self.dtype) new.data = copy.deepcopy(self.data) Modified: trunk/Lib/sparse/tests/test_sparse.py =================================================================== --- trunk/Lib/sparse/tests/test_sparse.py 2007-04-30 22:15:08 UTC (rev 2950) +++ trunk/Lib/sparse/tests/test_sparse.py 2007-05-02 08:55:39 UTC (rev 2951) @@ -710,7 +710,17 @@ B[5,6] = 20 assert_array_equal(A * A.T, (B * B.T).todense()) assert_array_equal(A * A.H, (B * B.H).todense()) - + + def check_scalar_mul(self): + x = lil_matrix((3,3)) + x[0,0] = 2 + + x = x*2 + assert_equal(x[0,0],4) + + x = x*0 + assert_equal(x[0,0],0) + def check_lil_lil_assignment(self): """ Tests whether a row of one lil_matrix can be assigned to another. 
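For readers skimming the digest, a minimal usage sketch of the behaviour r2951 fixes: scalar multiplication of a lil_matrix, including multiplication by zero, through the public scipy.sparse interface. The import path shown is the installed scipy.sparse package (an assumption relative to the Lib/ source layout in the diff).

    # Sketch of the behaviour fixed in r2951: multiplying a lil_matrix by a
    # scalar, including zero, should return a lil_matrix of the same shape.
    from scipy.sparse import lil_matrix

    x = lil_matrix((3, 3))
    x[0, 0] = 2

    y = x * 2                     # every stored value is scaled
    assert y[0, 0] == 4

    z = x * 0                     # multiplication by zero: all-zero matrix
    assert z[0, 0] == 0
    assert z.shape == (3, 3)
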
From scipy-svn at scipy.org Wed May 2 10:29:37 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 2 May 2007 09:29:37 -0500 (CDT) Subject: [Scipy-svn] r2952 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070502142937.BF46D39C0CE@new.scipy.org> Author: pierregm Date: 2007-05-02 09:29:34 -0500 (Wed, 02 May 2007) New Revision: 2952 Modified: trunk/Lib/sandbox/timeseries/src/cseries.c Log: fixed a problem of compatibility w/ python 2.5 (use Py_ssize_t instead of int) Modified: trunk/Lib/sandbox/timeseries/src/cseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-02 08:55:39 UTC (rev 2951) +++ trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-02 14:29:34 UTC (rev 2952) @@ -1453,7 +1453,7 @@ PyObject *key, *value; - int pos = 0; + Py_ssize_t pos = 0; while (PyDict_Next(source, &pos, &key, &value)) { PyObject *tuple_iter; @@ -1461,7 +1461,7 @@ if((tuple_iter = PyObject_GetIter(value)) == NULL) return INT_ERR_CODE; - while (item = PyIter_Next(tuple_iter)) { + while ((item = PyIter_Next(tuple_iter)) != NULL) { PyDict_SetItem(dest, item, key); Py_DECREF(item); } @@ -2113,6 +2113,7 @@ int freq_group = get_freq_group(self->freq); PyObject *string_arg, *retval; + string_arg = NULL; if (freq_group == FR_ANN) { string_arg = Py_BuildValue("(s)", "%Y"); } else if (freq_group == FR_QTR) { string_arg = Py_BuildValue("(s)", "%FQ%q"); } else if (freq_group == FR_MTH) { string_arg = Py_BuildValue("(s)", "%b-%Y"); } @@ -2275,7 +2276,7 @@ if (obj1->value < obj2->value) return -1; if (obj1->value > obj2->value) return 1; if (obj1->value == obj2->value) return 0; - + return -1; } static long @@ -3067,6 +3068,7 @@ MEM_CHECK(*result_mask) result_mask_tmp = (PyArrayObject**)result_mask; (*result_mask_tmp)->flags = ((*result_mask_tmp)->flags) | NPY_OWNDATA; + return 0; } /* computation portion of moving sum. 
Appropriate mask is overlayed on top From scipy-svn at scipy.org Wed May 2 10:30:16 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 2 May 2007 09:30:16 -0500 (CDT) Subject: [Scipy-svn] r2953 - trunk/Lib/sandbox/timeseries Message-ID: <20070502143016.C57FF39C140@new.scipy.org> Author: pierregm Date: 2007-05-02 09:30:15 -0500 (Wed, 02 May 2007) New Revision: 2953 Modified: trunk/Lib/sandbox/timeseries/__init__.py Log: Modified: trunk/Lib/sandbox/timeseries/__init__.py =================================================================== --- trunk/Lib/sandbox/timeseries/__init__.py 2007-05-02 14:29:34 UTC (rev 2952) +++ trunk/Lib/sandbox/timeseries/__init__.py 2007-05-02 14:30:15 UTC (rev 2953) @@ -1,7 +1,19 @@ """TimeSeries -Support for time series in numpy/scipy +__author__ = "Pierre GF Gerard-Marchant & Matt Knox ($Author$)" +__version__ = '1.0' +__revision__ = "$Revision$" +__date__ = '$Date$' + +import tcore +from tcore import * +import tdates +from tdates import * +import tseries +from tseries import * +import tmulti +from tmulti import * :author: Pierre GF Gerard-Marchant & Matt Knox :contact: pierregm_at_uga_dot_edu - mattknox_ca_at_hotmail_dot_com :version: $Id$ @@ -24,10 +36,15 @@ import tmulti from tmulti import * import reportlib + from reportlib import * import lib from lib import filters, interpolate, moving_funcs + +__all__ = ['tdates', 'tseries','tmulti','reportlib','filters','interpolate'] +__all__ += tdates.__all__ +__all__ += tseries.__all__ __all__ = ['const', 'tdates','tseries','tmulti','reportlib','filters', 'interpolate', 'moving_funcs'] @@ -35,3 +52,4 @@ __all__ += tseries.__all__ __all__ += tmulti.__all__ __all__ += reportlib.__all__ + From scipy-svn at scipy.org Thu May 3 02:18:02 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 01:18:02 -0500 (CDT) Subject: [Scipy-svn] r2954 - trunk/Lib/io Message-ID: <20070503061802.9B40639C065@new.scipy.org> Author: oliphant Date: 2007-05-03 01:17:49 -0500 (Thu, 03 May 2007) New Revision: 2954 Added: trunk/Lib/io/netcdf.py Modified: trunk/Lib/io/__init__.py Log: Add netcdf file reader to scipy.io Modified: trunk/Lib/io/__init__.py =================================================================== --- trunk/Lib/io/__init__.py 2007-05-02 14:30:15 UTC (rev 2953) +++ trunk/Lib/io/__init__.py 2007-05-03 06:17:49 UTC (rev 2954) @@ -7,7 +7,10 @@ from numpyio import packbits, unpackbits, bswap, fread, fwrite, \ convert_objectarray +# matfile read and write from mio import * +# netCDF file support +from netcdf import * from npfile import npfile from recaster import sctype_attributes, Recaster from array_import import * Added: trunk/Lib/io/netcdf.py =================================================================== --- trunk/Lib/io/netcdf.py 2007-05-02 14:30:15 UTC (rev 2953) +++ trunk/Lib/io/netcdf.py 2007-05-03 06:17:49 UTC (rev 2954) @@ -0,0 +1,259 @@ +"""NetCDF file reader. + +This is adapted from Roberto De Almeida's Pupynere PUre PYthon NEtcdf REader. + +classes changed to underscore_separated instead of CamelCase + +TODO: + + Add write capability. 
+""" + +#__author__ = "Roberto De Almeida " + + +__all__ = ['netcdf_file', 'netcdf_variable'] + +import struct +import itertools +import mmap + +from numpy import ndarray, zeros, array + + +ABSENT = '\x00' * 8 +ZERO = '\x00' * 4 +NC_BYTE = '\x00\x00\x00\x01' +NC_CHAR = '\x00\x00\x00\x02' +NC_SHORT = '\x00\x00\x00\x03' +NC_INT = '\x00\x00\x00\x04' +NC_FLOAT = '\x00\x00\x00\x05' +NC_DOUBLE = '\x00\x00\x00\x06' +NC_DIMENSION = '\x00\x00\x00\n' +NC_VARIABLE = '\x00\x00\x00\x0b' +NC_ATTRIBUTE = '\x00\x00\x00\x0c' + + +class netcdf_file(object): + """A NetCDF file parser.""" + + def __init__(self, file): + self._buffer = open(file, 'rb') + self._parse() + + def read(self, size=-1): + """Alias for reading the file buffer.""" + return self._buffer.read(size) + + def _parse(self): + """Initial parsing of the header.""" + # Check magic bytes. + assert self.read(3) == 'CDF' + + # Read version byte. + byte = self.read(1) + self.version_byte = struct.unpack('>b', byte)[0] + + # Read header info. + self._numrecs() + self._dim_array() + self._gatt_array() + self._var_array() + + def _numrecs(self): + """Read number of records.""" + self._nrecs = self._unpack_int() + + def _dim_array(self): + """Read a dict with dimensions names and sizes.""" + assert self.read(4) in [ZERO, NC_DIMENSION] + count = self._unpack_int() + + self.dimensions = {} + self._dims = [] + for dim in range(count): + name = self._read_string() + length = self._unpack_int() + if length == 0: length = None # record dimension + self.dimensions[name] = length + self._dims.append(name) # preserve dim order + + def _gatt_array(self): + """Read global attributes.""" + self.attributes = self._att_array() + + # Update __dict__ for compatibility with S.IO.N + self.__dict__.update(self.attributes) + + def _att_array(self): + """Read a dict with attributes.""" + assert self.read(4) in [ZERO, NC_ATTRIBUTE] + count = self._unpack_int() + + # Read attributes. + attributes = {} + for attribute in range(count): + name = self._read_string() + nc_type = self._unpack_int() + n = self._unpack_int() + + # Read value for attributes. + attributes[name] = self._read_values(n, nc_type) + + return attributes + + def _var_array(self): + """Read all variables.""" + assert self.read(4) in [ZERO, NC_VARIABLE] + + # Read size of each record, in bytes. + self._read_recsize() + + # Read variables. 
+ self.variables = {} + count = self._unpack_int() + for variable in range(count): + name = self._read_string() + self.variables[name] = self._read_var() + + def _read_recsize(self): + """Read all variables and compute record bytes.""" + pos = self._buffer.tell() + + recsize = 0 + count = self._unpack_int() + for variable in range(count): + name = self._read_string() + n = self._unpack_int() + isrec = False + for i in range(n): + dimid = self._unpack_int() + name = self._dims[dimid] + dim = self.dimensions[name] + if dim is None and i == 0: + isrec = True + attributes = self._att_array() + nc_type = self._unpack_int() + vsize = self._unpack_int() + begin = [self._unpack_int, self._unpack_int64][self.version_byte-1]() + + if isrec: recsize += vsize + + self._recsize = recsize + self._buffer.seek(pos) + + def _read_var(self): + dimensions = [] + shape = [] + n = self._unpack_int() + isrec = False + for i in range(n): + dimid = self._unpack_int() + name = self._dims[dimid] + dimensions.append(name) + dim = self.dimensions[name] + if dim is None and i == 0: + dim = self._nrecs + isrec = True + shape.append(dim) + dimensions = tuple(dimensions) + shape = tuple(shape) + + attributes = self._att_array() + nc_type = self._unpack_int() + vsize = self._unpack_int() + + # Read offset. + begin = [self._unpack_int, self._unpack_int64][self.version_byte-1]() + + return netcdf_variable(self._buffer.fileno(), nc_type, vsize, begin, shape, dimensions, attributes, isrec, self._recsize) + + def _read_values(self, n, nc_type): + bytes = [1, 1, 2, 4, 4, 8] + typecodes = ['b', 'c', 'h', 'i', 'f', 'd'] + + count = n * bytes[nc_type-1] + values = self.read(count) + padding = self.read((4 - (count % 4)) % 4) + + typecode = typecodes[nc_type-1] + if nc_type != 2: # not char + values = struct.unpack('>%s' % (typecode * n), values) + values = array(values, dtype=typecode) + else: + # Remove EOL terminator. + if values.endswith('\x00'): values = values[:-1] + + return values + + def _unpack_int(self): + return struct.unpack('>i', self.read(4))[0] + _unpack_int32 = _unpack_int + + def _unpack_int64(self): + return struct.unpack('>q', self.read(8))[0] + + def _read_string(self): + count = struct.unpack('>i', self.read(4))[0] + s = self.read(count) + # Remove EOL terminator. + if s.endswith('\x00'): s = s[:-1] + padding = self.read((4 - (count % 4)) % 4) + return s + + def close(self): + self._buffer.close() + + +class netcdf_variable(object): + def __init__(self, fileno, nc_type, vsize, begin, shape, dimensions, attributes, isrec=False, recsize=0): + self._nc_type = nc_type + self._vsize = vsize + self._begin = begin + self.shape = shape + self.dimensions = dimensions + self.attributes = attributes # for ``dap.plugins.netcdf`` + self.__dict__.update(attributes) + self._is_record = isrec + + # Number of bytes and type. + self._bytes = [1, 1, 2, 4, 4, 8][self._nc_type-1] + type_ = ['i', 'S', 'i', 'i', 'f', 'f'][self._nc_type-1] + dtype = '>%s%d' % (type_, self._bytes) + bytes = self._begin + self._vsize + + if isrec: + # Record variables are not stored contiguosly on disk, so we + # need to create a separate array for each record. + self.__array_data__ = zeros(shape, dtype) + bytes += (shape[0] - 1) * recsize + for n in range(shape[0]): + offset = self._begin + (n * recsize) + mm = mmap.mmap(fileno, bytes, access=mmap.ACCESS_READ) + self.__array_data__[n] = ndarray.__new__(ndarray, shape[1:], dtype=dtype, buffer=mm, offset=offset, order=0) + else: + # Create buffer and data. 
+ mm = mmap.mmap(fileno, bytes, access=mmap.ACCESS_READ) + self.__array_data__ = ndarray.__new__(ndarray, shape, dtype=dtype, buffer=mm, offset=self._begin, order=0) + + # N-D array interface + self.__array_interface__ = {'shape' : shape, + 'typestr': dtype, + 'data' : self.__array_data__, + 'version': 3, + } + + def __getitem__(self, index): + return self.__array_data__.__getitem__(index) + + def getValue(self): + """For scalars.""" + return self.__array_data__.item() + + def typecode(self): + return ['b', 'c', 'h', 'i', 'f', 'd'][self._nc_type-1] + + +def _test(): + import doctest + doctest.testmod() From scipy-svn at scipy.org Thu May 3 10:13:11 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 09:13:11 -0500 (CDT) Subject: [Scipy-svn] r2955 - trunk/Lib/sandbox/timeseries Message-ID: <20070503141311.01D5539C171@new.scipy.org> Author: mattknox_ca Date: 2007-05-03 09:13:04 -0500 (Thu, 03 May 2007) New Revision: 2955 Modified: trunk/Lib/sandbox/timeseries/tdates.py Log: allowed unicode strings to be passed to listparser Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 06:17:49 UTC (rev 2954) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 14:13:04 UTC (rev 2955) @@ -491,7 +491,7 @@ if dlist.ndim == 0: dlist.shape = (1,) # Case #1: dates as strings ................. - if dlist.dtype.kind == 'S': + if dlist.dtype.kind in 'SU': #...construct a list of ordinals ords = numpy.fromiter((DateTimeFromString(s).toordinal() for s in dlist), float_) From scipy-svn at scipy.org Thu May 3 11:02:13 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 10:02:13 -0500 (CDT) Subject: [Scipy-svn] r2956 - trunk/Lib/sandbox/timeseries Message-ID: <20070503150213.53C6D39C193@new.scipy.org> Author: mattknox_ca Date: 2007-05-03 10:01:45 -0500 (Thu, 03 May 2007) New Revision: 2956 Modified: trunk/Lib/sandbox/timeseries/tdates.py Log: changed default for relation parameter of DateArray asfreq method to "AFTER" Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 14:13:04 UTC (rev 2955) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 15:01:45 UTC (rev 2956) @@ -354,7 +354,7 @@ self._cachedinfo['tostr'] = tostr return self._cachedinfo['tostr'] # - def asfreq(self, freq=None, relation="BEFORE"): + def asfreq(self, freq=None, relation="AFTER"): "Converts the dates to another frequency." 
# Note: As we define a new object, we don't need caching if freq is None or freq == _c.FR_UND: From scipy-svn at scipy.org Thu May 3 13:37:26 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 12:37:26 -0500 (CDT) Subject: [Scipy-svn] r2957 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070503173726.0CB9039C05D@new.scipy.org> Author: mattknox_ca Date: 2007-05-03 12:37:21 -0500 (Thu, 03 May 2007) New Revision: 2957 Modified: trunk/Lib/sandbox/timeseries/src/cseries.c Log: got rid of some warnings Modified: trunk/Lib/sandbox/timeseries/src/cseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-03 15:01:45 UTC (rev 2956) +++ trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-03 17:37:21 UTC (rev 2957) @@ -1470,7 +1470,7 @@ return 0; } -static int build_freq_dict() { +static int build_freq_dict(void) { char ANN_prefixes[8][15] = { "A", "Y", "ANN", "ANNUAL", "ANNUALLY", "YR", "YEAR", "YEARLY" }; @@ -1671,7 +1671,7 @@ /* for use in C code */ static DateObject * -DateObject_New() { +DateObject_New(void) { PyObject *dummy; return (DateObject*)DateObject_new(&DateType, dummy, dummy); } @@ -1989,7 +1989,7 @@ struct date_info tempDate; long absdate; double abstime; - int i, result_len, special_found=0; + int i, result_len; PyObject *py_result; long (*toDaily)(long, char, struct asfreq_info*) = NULL; @@ -2721,7 +2721,8 @@ long newStart, newStartTemp; long newEnd, newEndTemp; long newLen, newHeight; - long i, currIndex, prevIndex; + int i; + long currIndex, prevIndex; long nd; npy_intp *dim, *newIdx; long currPerLen; @@ -3719,9 +3720,9 @@ Py_DECREF(ops_dict); Py_INCREF(&DateType); - PyModule_AddObject(m, "Date", (PyObject *)&DateType); + PyModule_AddObject(m, "Date", (PyObject *)(&DateType)); - if(build_freq_dict(m) == INT_ERR_CODE) { + if(build_freq_dict() == INT_ERR_CODE) { PyErr_SetString( \ PyExc_ImportError, \ "initialization of module timeseries.cseries failed"); @@ -3735,4 +3736,4 @@ PyModule_AddObject(m, "DateCalc_Error", DateCalc_Error); PyModule_AddObject(m, "DateCalc_RangeError", DateCalc_RangeError); -} \ No newline at end of file +} From scipy-svn at scipy.org Thu May 3 14:52:02 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 13:52:02 -0500 (CDT) Subject: [Scipy-svn] r2958 - trunk/Lib/sandbox/timeseries Message-ID: <20070503185202.874B339C05D@new.scipy.org> Author: mattknox_ca Date: 2007-05-03 13:51:58 -0500 (Thu, 03 May 2007) New Revision: 2958 Modified: trunk/Lib/sandbox/timeseries/tdates.py Log: fixed bug with get_steps checking for undefined frequency improperly Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 17:37:21 UTC (rev 2957) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 18:51:58 UTC (rev 2958) @@ -399,7 +399,7 @@ def get_steps(self): """Returns the time steps between consecutive dates. 
The timesteps have the same unit as the frequency of the series.""" - if self.freq == 'U': + if self.freq == _c.FR_UND: warnings.warn("Undefined frequency: assuming integers!") if self._cachedinfo['steps'] is None: _cached = self._cachedinfo From scipy-svn at scipy.org Thu May 3 14:52:49 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 3 May 2007 13:52:49 -0500 (CDT) Subject: [Scipy-svn] r2959 - trunk/Lib/sandbox/timeseries Message-ID: <20070503185249.813DD39C014@new.scipy.org> Author: mattknox_ca Date: 2007-05-03 13:52:46 -0500 (Thu, 03 May 2007) New Revision: 2959 Modified: trunk/Lib/sandbox/timeseries/tseries.py Log: raise exception when undefined frequency used in convert method/function Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-03 18:51:58 UTC (rev 2958) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-03 18:52:46 UTC (rev 2959) @@ -690,10 +690,6 @@ if freq is None: return self return TimeSeries(self._series, dates=self._dates.asfreq(freq)) - - def convert(self, freq, func='auto', position='END'): - "Converts the dates to another frequency, and adapt the data." - return convert(self, freq, func=func, position=position) #..................................................... def transpose(self, *axes): """ a.transpose(*axes) @@ -1182,6 +1178,18 @@ """ if not isinstance(series,TimeSeries): raise TypeError, "The argument should be a valid TimeSeries!" + + toFreq = check_freq(freq) + fromFreq = series.freq + + if toFreq == _c.FR_UND: + raise TimeSeriesError, \ + "Cannot convert a series to UNDEFINED frequency." + + if fromFreq == _c.FR_UND: + raise TimeSeriesError, \ + "Cannot convert a series with UNDEFINED frequency." + if not series.isvalid(): raise TimeSeriesError, \ "Cannot adjust a series with missing or duplicated dates." @@ -1189,9 +1197,6 @@ if position.upper() not in ('END','START'): raise ValueError("invalid value for position argument: (%s)",str(position)) - toFreq = freq - fromFreq = series.freq - start_date = series._dates[0] if series.size == 0: From scipy-svn at scipy.org Fri May 4 11:43:38 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 4 May 2007 10:43:38 -0500 (CDT) Subject: [Scipy-svn] r2960 - in trunk/Lib/sandbox/timeseries: . plotlib Message-ID: <20070504154338.4E16B39C098@new.scipy.org> Author: pierregm Date: 2007-05-04 10:43:31 -0500 (Fri, 04 May 2007) New Revision: 2960 Modified: trunk/Lib/sandbox/timeseries/ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tseries.py Log: mpl_timeseries : prevent series as a keyword of add_tsplot to be overwritten if the figure has no linked series yet. tseries : quicki fix : raise an exception if the input series has more than one variable (to prevent segfault). Property changes on: trunk/Lib/sandbox/timeseries ___________________________________________________________________ Name: svn:ignore - sandbox quicktest.py build + sandbox quicktest.py build addons buildlog Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-03 18:52:46 UTC (rev 2959) +++ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-04 15:43:31 UTC (rev 2960) @@ -833,8 +833,9 @@ #......... 
def add_tsplot(self, *args, **kwargs): """Adds a `TimeSeriesPlot` subplot to the figure.""" - kwargs.update(SubplotClass=TimeSeriesPlot, - series=self._series) + kwargs.update(SubplotClass=TimeSeriesPlot) + if self._series is not None: + kwargs.update(series=self._series) return add_generic_subplot(self, *args, **kwargs) add_plot = add_tsplot TSFigure = TimeSeriesFigure Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-03 18:52:46 UTC (rev 2959) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-04 15:43:31 UTC (rev 2960) @@ -3,12 +3,12 @@ :author: Pierre GF Gerard-Marchant & Matt Knox :contact: pierregm_at_uga_dot_edu - mattknox_ca_at_hotmail_dot_com -:version: $Id: tdates.py 2815 2007-03-02 16:39:07Z mattknox_ca $ +:version: $Id$ """ -__author__ = "Pierre GF Gerard-Marchant & Matt Knox ($Author: mattknox_ca $)" +__author__ = "Pierre GF Gerard-Marchant & Matt Knox ($Author$)" __version__ = '1.0' -__revision__ = "$Revision: 2815 $" -__date__ = '$Date: 2007-03-02 11:39:07 -0500 (Fri, 02 Mar 2007) $' +__revision__ = "$Revision$" +__date__ = '$Date$' import datetime as dt Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-03 18:52:46 UTC (rev 2959) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-04 15:43:31 UTC (rev 2960) @@ -43,6 +43,7 @@ import cseries + __all__ = [ 'TimeSeriesError','TimeSeriesCompatibilityError','TimeSeries','isTimeSeries', 'time_series', 'tsmasked', @@ -1208,6 +1209,9 @@ tempData = series._series.filled() tempMask = getmaskarray(series) + if (tempData.size // series._dates.size) > 1: + raise TimeSeriesError("convert works with 1D data only !") + cRetVal = cseries.TS_convert(tempData, fromFreq, toFreq, position, int(start_date), tempMask) _values = cRetVal['values'] From scipy-svn at scipy.org Fri May 4 14:00:32 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 4 May 2007 13:00:32 -0500 (CDT) Subject: [Scipy-svn] r2961 - in trunk/Lib/sandbox/timeseries: . tests Message-ID: <20070504180032.AC3E139C030@new.scipy.org> Author: pierregm Date: 2007-05-04 13:00:28 -0500 (Fri, 04 May 2007) New Revision: 2961 Modified: trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tests/test_timeseries.py trunk/Lib/sandbox/timeseries/tseries.py Log: tdates : added a _unsorted attributes, that keeps track of the initial order of the dates tseries: ensures that the data are sorted along the dates Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-04 15:43:31 UTC (rev 2960) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-04 18:00:28 UTC (rev 2961) @@ -195,6 +195,7 @@ _dates.shape = (1,) _dates = _dates.view(cls) _dates.freq = _freq + _dates._unsorted = None return _dates def __array_wrap__(self, obj, context=None): @@ -205,6 +206,7 @@ def __array_finalize__(self, obj): self.freq = getattr(obj, 'freq', _c.FR_UND) + self._unsorted = getattr(obj,'_unsorted',None) self._cachedinfo = dict(toobj=None, tostr=None, toord=None, steps=None, full=None, hasdups=None) if hasattr(obj,'_cachedinfo'): @@ -487,7 +489,8 @@ def _listparser(dlist, freq=None): "Constructs a DateArray from a list." 
dlist = numeric.asarray(dlist) - dlist.sort() + idx = dlist.argsort() + dlist = dlist[idx] if dlist.ndim == 0: dlist.shape = (1,) # Case #1: dates as strings ................. @@ -531,6 +534,7 @@ dates = [Date(freq, datetime=dt.datetime.fromordinal(a)) for a in ords] # result = DateArray(dates, freq) + result._unsorted = idx return result @@ -695,4 +699,4 @@ if 1: "Tests the automatic sorting of dates." D = date_array_fromlist(dlist=['2006-01','2005-01','2004-01'],freq='M') - assert_equal(D.view(N.ndarray), [24037, 24049, 24061]) \ No newline at end of file + assert_equal(D.view(ndarray), [24037, 24049, 24061]) \ No newline at end of file Modified: trunk/Lib/sandbox/timeseries/tests/test_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-04 15:43:31 UTC (rev 2960) +++ trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-04 18:00:28 UTC (rev 2961) @@ -73,6 +73,7 @@ def test_fromdatearray(self): + "Tests the creation of a series from a datearray" _, dates, _ = self.d data = dates @@ -91,10 +92,28 @@ def test_datafromlist(self): + "Check the creation of a time series from a list of data." (_, dates, _) = self.d data = list(range(15)) series = time_series(data, dates) assert_equal(series._data.size, 15) + + def test_unsorted(self): + "Tests that the data are porperly sorted along the dates." + dlist = ['2007-01-%02i' % i for i in (3,2,1)] + data = [10,20,30] + series = time_series(data,dlist) + assert_equal(series._data,[30,20,10]) + # + series = TimeSeries(data, dlist) + assert_equal(series._data,[30,20,10]) + # + series = TimeSeries(data, dlist, mask=[1,0,0]) + assert_equal(series._mask,[0,0,1]) + # + data = masked_array([10,20,30],mask=[1,0,0]) + series = TimeSeries(data, dlist) + assert_equal(series._mask,[0,0,1]) #............................................................................... class test_arithmetics(NumpyTestCase): @@ -463,7 +482,7 @@ assert_equal(empty_ts.end_date, None) def test__timeseriescompat_multiple(self): - + "Tests the compatibility of multiple time series." seriesM_10 = time_series(N.arange(10), date_array( start_date=Date(freq='m', year=2005, month=1), Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-04 15:43:31 UTC (rev 2960) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-04 18:00:28 UTC (rev 2961) @@ -324,11 +324,9 @@ maparms = dict(copy=copy, dtype=dtype, fill_value=fill_value, keep_mask=keep_mask, small_mask=small_mask, hard_mask=hard_mask,) - # Get the data ............................... - _data = MaskedArray(data, mask=mask, **maparms).view(cls) + _data = MaskedArray(data, mask=mask, **maparms) # Get the frequency .......................... freq = check_freq(freq) - # Get the dates .............................. if dates is None: newdates = getattr(data, '_dates', None) @@ -350,7 +348,10 @@ newdates = date_array([], freq=freq) # Get observed ............................... observed = getattr(data, 'observed', fmtObserv(observed)) - + # Get the data ............................... + if newdates._unsorted is not None: + _data = _data[newdates._unsorted] + _data = _data.view(cls) if _data is masked: assert(numeric.size(newdates)==1) return _data.view(cls) @@ -373,6 +374,7 @@ return result #............................................ def _get_series(self): + "Returns the series as a regular masked array." 
if self._mask.ndim == 0 and self._mask: return masked return self.view(MaskedArray) @@ -945,10 +947,16 @@ dates = date_array([], freq=freq) elif not isinstance(dates, DateArray): dates = date_array(dlist=dates, freq=freq) - return TimeSeries(data=data, dates=dates, mask=mask, observed=observed, - copy=copy, dtype=dtype, fill_value=fill_value, - keep_mask=keep_mask, small_mask=small_mask, - hard_mask=hard_mask,) + if dates._unsorted is not None: + idx = dates._unsorted + data = data[idx] + if mask is not nomask: + mask = mask[idx] + dates._unsorted = None + return TimeSeries(data=data, dates=dates, mask=mask, + observed=observed, copy=copy, dtype=dtype, + fill_value=fill_value, keep_mask=keep_mask, + small_mask=small_mask, hard_mask=hard_mask,) def isTimeSeries(series): @@ -1452,6 +1460,7 @@ return newseries #............................................................................... def empty_like(series): + """Returns an empty series with the same dtype, mask and dates as series.""" result = N.empty_like(series).view(type(series)) result._dates = series._dates result._mask = series._mask From scipy-svn at scipy.org Fri May 4 23:14:40 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 4 May 2007 22:14:40 -0500 (CDT) Subject: [Scipy-svn] r2962 - trunk/Lib/stats/tests Message-ID: <20070505031440.4A41239C010@new.scipy.org> Author: rkern Date: 2007-05-04 22:14:38 -0500 (Fri, 04 May 2007) New Revision: 2962 Modified: trunk/Lib/stats/tests/test_morestats.py Log: Make the Anderson tests deterministic. Modified: trunk/Lib/stats/tests/test_morestats.py =================================================================== --- trunk/Lib/stats/tests/test_morestats.py 2007-05-04 18:00:28 UTC (rev 2961) +++ trunk/Lib/stats/tests/test_morestats.py 2007-05-05 03:14:38 UTC (rev 2962) @@ -10,6 +10,7 @@ restore_path() import numpy as N +from numpy.random import RandomState g1 = [1.006, 0.996, 0.998, 1.000, 0.992, 0.993, 1.002, 0.999, 0.994, 1.000] g2 = [0.998, 1.006, 1.000, 1.002, 0.997, 0.998, 0.996, 1.000, 1.006, 0.988] @@ -39,20 +40,18 @@ class test_anderson(NumpyTestCase): def check_normal(self): - x1 = scipy.stats.expon.rvs(size=50) - x2 = scipy.stats.norm.rvs(size=50) + rs = RandomState(1234567890) + x1 = rs.standard_exponential(size=50) + x2 = rs.standard_normal(size=50) A,crit,sig = scipy.stats.anderson(x1) assert_array_less(crit[:-1], A) A,crit,sig = scipy.stats.anderson(x2) - try: - assert_array_less(A, crit[-2:]) - except: - A, crit, sig = scipy.stats.anderson(x2) - assert_array_less(A, crit[-2:]) + assert_array_less(A, crit[-2:]) def check_expon(self): - x1 = scipy.stats.expon.rvs(size=50) - x2 = scipy.stats.norm.rvs(size=50) + rs = RandomState(1234567890) + x1 = rs.standard_exponential(size=50) + x2 = rs.standard_normal(size=50) A,crit,sig = scipy.stats.anderson(x1,'expon') assert_array_less(A, crit[-2:]) A,crit,sig = scipy.stats.anderson(x2,'expon') From scipy-svn at scipy.org Sat May 5 04:52:21 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 03:52:21 -0500 (CDT) Subject: [Scipy-svn] r2963 - in trunk/Lib/ndimage: . tests Message-ID: <20070505085221.B15A039C079@new.scipy.org> Author: stefan Date: 2007-05-05 03:52:03 -0500 (Sat, 05 May 2007) New Revision: 2963 Modified: trunk/Lib/ndimage/measurements.py trunk/Lib/ndimage/tests/test_ndimage.py Log: Reject indices of type uint64/int64. Update documentation. 
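Before the r2963 diff, a small hedged sketch of the call pattern it guards: ndimage.sum with a labels array and an explicit index. The ValueError on 64-bit integer indices is specific to this revision of the code (the underlying C statistics routine only accepts 32-bit indices here), so treat the exception as illustrative of this era rather than a stable contract.

    # Sketch of ndimage.sum with labels/index, as guarded by r2963: at this
    # revision a 64-bit integer index array is rejected with ValueError.
    import numpy
    from scipy import ndimage

    data = [0, 1, 2, 3]
    labels = [1, 1, 2, 2]
    print(ndimage.sum(data, labels, index=[1, 2]))        # -> [1.0, 5.0]

    bad_index = numpy.array([1, 2], dtype=numpy.int64)
    try:
        ndimage.sum(data, labels, index=bad_index)
    except ValueError:
        print("int64/uint64 index arrays are rejected at this revision")
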
Modified: trunk/Lib/ndimage/measurements.py =================================================================== --- trunk/Lib/ndimage/measurements.py 2007-05-05 03:14:38 UTC (rev 2962) +++ trunk/Lib/ndimage/measurements.py 2007-05-05 08:52:03 UTC (rev 2963) @@ -87,22 +87,39 @@ max_label = input.max() return _nd_image.find_objects(input, max_label) -def sum(input, labels = None, index = None): +def sum(input, labels=None, index=None): """Calculate the sum of the values of the array. - The index parameter is a single label number or a sequence of - label numbers of the objects to be measured. If index is None, all - values are used where labels is larger than zero. + :Parameters: + index : scalar or array + A single label number or a sequence of label numbers of + the objects to be measured. If index is None, all + values are used where 'labels' is larger than zero. + + labels : array of same shape as input + Assign labels to the values of the array. For example, + if + + input = [0,1,2,3] and + labels = [1,1,2,2] + + then sum(input, labels, index=[1,2]) would yield [1,5]. + """ input = numpy.asarray(input) if numpy.iscomplexobj(input): raise TypeError, 'Complex type not supported' - if labels != None: + if labels is not None: labels = numpy.asarray(labels) labels = _broadcast(labels, input.shape) if labels.shape != input.shape: raise RuntimeError, 'input and labels shape are not equal' + if index is not None: + index = numpy.asarray(index) + if numpy.issubsctype(index.dtype,numpy.int64) or \ + numpy.issubsctype(index.dtype,numpy.uint64): + raise ValueError("Index values cannot be of type int64/uint64.") return _nd_image.statistics(input, labels, index, 0) Modified: trunk/Lib/ndimage/tests/test_ndimage.py =================================================================== --- trunk/Lib/ndimage/tests/test_ndimage.py 2007-05-05 03:14:38 UTC (rev 2962) +++ trunk/Lib/ndimage/tests/test_ndimage.py 2007-05-05 08:52:03 UTC (rev 2963) @@ -2823,6 +2823,13 @@ index = [4, 8, 2]) self.failUnless(output == [4.0, 0.0, 5.0]) + def test_sum13(self): + "sum 13" + input = numpy.array([1,2,3,4]) + labels = numpy.array([0,0,0,0]) + index = numpy.array([0],numpy.uint64) + self.failUnlessRaises(ValueError,ndimage.sum,input,labels,index) + def test_mean01(self): "mean 1" labels = numpy.array([1, 0], bool) From scipy-svn at scipy.org Sat May 5 05:11:54 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 04:11:54 -0500 (CDT) Subject: [Scipy-svn] r2964 - in trunk/Lib/cluster: . tests Message-ID: <20070505091154.5E82139C094@new.scipy.org> Author: cdavid Date: 2007-05-05 04:11:47 -0500 (Sat, 05 May 2007) New Revision: 2964 Modified: trunk/Lib/cluster/tests/test_vq.py trunk/Lib/cluster/vq.py Log: Fix clusters.kmeans2 for 1d dimension data, generate better for unsupported rank 1 arrays and fix test for _vq Modified: trunk/Lib/cluster/tests/test_vq.py =================================================================== --- trunk/Lib/cluster/tests/test_vq.py 2007-05-05 08:52:03 UTC (rev 2963) +++ trunk/Lib/cluster/tests/test_vq.py 2007-05-05 09:11:47 UTC (rev 2964) @@ -1,7 +1,7 @@ #! /usr/bin/env python # David Cournapeau -# Last Change: Thu Apr 26 09:00 PM 2007 J +# Last Change: Sat May 05 06:00 PM 2007 J # For now, just copy the tests from sandbox.pyem, so we can check that # kmeans works OK for trivial examples. 
@@ -13,6 +13,12 @@ set_package_path() from cluster.vq import kmeans, kmeans2, py_vq, py_vq2, _py_vq_1d +try: + from cluster import _vq + TESTC=True +except ImportError: + print "== Error while importing _vq, not testing C imp of vq ==" + TESTC=False restore_path() #Optional: @@ -53,12 +59,11 @@ def check_vq(self, level=1): initc = N.concatenate(([[X[0]], [X[1]], [X[2]]])) code = initc.copy() - try: - import _vq + if TESTC: label1 = _vq.double_vq(X, initc)[0] assert_array_equal(label1, LABEL1) - except ImportError: - print "== Error while importing _vq, not testing C imp of vq ==" + else: + print "== not testing C imp of vq ==" #def check_vq_1d(self, level=1): # data = X[:, 0] @@ -115,5 +120,11 @@ kmeans2(data, 3, minit = 'random') kmeans2(data, 3, minit = 'points') + # Check special case 1d + data = data[:, :1] + kmeans2(data, 3, minit = 'random') + kmeans2(data, 3, minit = 'points') + + if __name__ == "__main__": NumpyTest().run() Modified: trunk/Lib/cluster/vq.py =================================================================== --- trunk/Lib/cluster/vq.py 2007-05-05 08:52:03 UTC (rev 2963) +++ trunk/Lib/cluster/vq.py 2007-05-05 09:11:47 UTC (rev 2964) @@ -446,8 +446,8 @@ Number of samples to generate. """ - mu = N.mean(data, 0) - cov = N.cov(data, rowvar = 0) + mu = N.mean(data, 0) + cov = N.atleast_2d(N.cov(data, rowvar = 0)) # k rows, d cols (one row = one obs) # Generate k sample of a random variable ~ Gaussian(mu, cov) @@ -500,6 +500,7 @@ nd = N.ndim(data) if nd == 1: d = 1 + raise ValueError("Input of rank 1 not supported yet") elif nd == 2: d = data.shape[1] else: From scipy-svn at scipy.org Sat May 5 05:26:27 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 04:26:27 -0500 (CDT) Subject: [Scipy-svn] r2965 - trunk/Lib/ndimage Message-ID: <20070505092627.8A43B39C094@new.scipy.org> Author: stefan Date: 2007-05-05 04:26:12 -0500 (Sat, 05 May 2007) New Revision: 2965 Modified: trunk/Lib/ndimage/measurements.py Log: Fix scalar conversion on 64-bit platforms. Modified: trunk/Lib/ndimage/measurements.py =================================================================== --- trunk/Lib/ndimage/measurements.py 2007-05-05 09:11:47 UTC (rev 2964) +++ trunk/Lib/ndimage/measurements.py 2007-05-05 09:26:12 UTC (rev 2965) @@ -116,7 +116,11 @@ if labels.shape != input.shape: raise RuntimeError, 'input and labels shape are not equal' if index is not None: - index = numpy.asarray(index) + if numpy.isscalar(index): + index = numpy.asarray(index,dtype=numpy.uint32) + else: + index = numpy.asarray(index) + if numpy.issubsctype(index.dtype,numpy.int64) or \ numpy.issubsctype(index.dtype,numpy.uint64): raise ValueError("Index values cannot be of type int64/uint64.") From scipy-svn at scipy.org Sat May 5 08:25:36 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 07:25:36 -0500 (CDT) Subject: [Scipy-svn] r2966 - trunk/Lib/ndimage Message-ID: <20070505122536.5B5D539C0B1@new.scipy.org> Author: stefan Date: 2007-05-05 07:25:22 -0500 (Sat, 05 May 2007) New Revision: 2966 Modified: trunk/Lib/ndimage/measurements.py Log: Force index to be of type int32 on 64-bit platform. 
Modified: trunk/Lib/ndimage/measurements.py =================================================================== --- trunk/Lib/ndimage/measurements.py 2007-05-05 09:26:12 UTC (rev 2965) +++ trunk/Lib/ndimage/measurements.py 2007-05-05 12:25:22 UTC (rev 2966) @@ -116,14 +116,11 @@ if labels.shape != input.shape: raise RuntimeError, 'input and labels shape are not equal' if index is not None: - if numpy.isscalar(index): - index = numpy.asarray(index,dtype=numpy.uint32) - else: - index = numpy.asarray(index) - - if numpy.issubsctype(index.dtype,numpy.int64) or \ - numpy.issubsctype(index.dtype,numpy.uint64): + T = getattr(index,'dtype',numpy.int32) + if numpy.issubsctype(T,numpy.int64) or \ + numpy.issubsctype(T,numpy.uint64): raise ValueError("Index values cannot be of type int64/uint64.") + index = numpy.asarray(index,dtype=T) return _nd_image.statistics(input, labels, index, 0) From scipy-svn at scipy.org Sat May 5 08:57:19 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 07:57:19 -0500 (CDT) Subject: [Scipy-svn] r2967 - trunk/Lib/ndimage Message-ID: <20070505125719.C153239C0D0@new.scipy.org> Author: stefan Date: 2007-05-05 07:57:06 -0500 (Sat, 05 May 2007) New Revision: 2967 Modified: trunk/Lib/ndimage/measurements.py Log: Do more precise type checking on indices. Modified: trunk/Lib/ndimage/measurements.py =================================================================== --- trunk/Lib/ndimage/measurements.py 2007-05-05 12:25:22 UTC (rev 2966) +++ trunk/Lib/ndimage/measurements.py 2007-05-05 12:57:06 UTC (rev 2967) @@ -91,20 +91,22 @@ """Calculate the sum of the values of the array. :Parameters: + labels : array of integers, same shape as input + Assign labels to the values of the array. + index : scalar or array A single label number or a sequence of label numbers of the objects to be measured. If index is None, all values are used where 'labels' is larger than zero. - labels : array of same shape as input - Assign labels to the values of the array. For example, - if + Examples + -------- - input = [0,1,2,3] and - labels = [1,1,2,2] + >>> input = [0,1,2,3] + >>> labels = [1,1,2,2] + >>> sum(input, labels, index=[1,2]) + [1.0, 5.0] - then sum(input, labels, index=[1,2]) would yield [1,5]. - """ input = numpy.asarray(input) if numpy.iscomplexobj(input): @@ -117,9 +119,9 @@ raise RuntimeError, 'input and labels shape are not equal' if index is not None: T = getattr(index,'dtype',numpy.int32) - if numpy.issubsctype(T,numpy.int64) or \ - numpy.issubsctype(T,numpy.uint64): - raise ValueError("Index values cannot be of type int64/uint64.") + if T not in [numpy.int8, numpy.int16, numpy.int32, + numpy.uint8, numpy.uint16, numpy.bool]: + raise ValueError("Invalid index type") index = numpy.asarray(index,dtype=T) return _nd_image.statistics(input, labels, index, 0) From scipy-svn at scipy.org Sat May 5 17:18:49 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 5 May 2007 16:18:49 -0500 (CDT) Subject: [Scipy-svn] r2968 - trunk/Lib/stats Message-ID: <20070505211849.724C139C135@new.scipy.org> Author: rkern Date: 2007-05-05 16:18:43 -0500 (Sat, 05 May 2007) New Revision: 2968 Modified: trunk/Lib/stats/distributions.py Log: Add missing methods to rv_frozen. 
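A brief hedged sketch of what the r2968 additions expose: once a distribution is "frozen" with its shape and location parameters, moment(), entropy() and pmf() now delegate to the underlying distribution instead of being absent from the frozen object.

    # Sketch of the frozen-distribution methods added in r2968.  moment() and
    # entropy() apply to continuous frozen distributions; pmf() is meaningful
    # for discrete ones, so a Poisson example is used for it.
    from scipy import stats

    frozen_norm = stats.norm(loc=1.0, scale=2.0)   # rv_frozen instance
    print(frozen_norm.moment(2))                   # second moment: var + mean**2 = 5.0
    print(frozen_norm.entropy())                   # differential entropy

    frozen_poisson = stats.poisson(3.0)
    print(frozen_poisson.pmf(2))                   # P(X == 2) for mu = 3
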
Modified: trunk/Lib/stats/distributions.py =================================================================== --- trunk/Lib/stats/distributions.py 2007-05-05 12:57:06 UTC (rev 2967) +++ trunk/Lib/stats/distributions.py 2007-05-05 21:18:43 UTC (rev 2968) @@ -119,6 +119,12 @@ return self.dist.sf(x,*self.args,**self.kwds) def stats(self): return self.dist.stats(*self.args,**self.kwds) + def moment(self,n): + return self.dist.moment(n,*self.args,**self.kwds) + def entropy(self): + return self.dist.entropy(*self.args,**self.kwds) + def pmf(self,k): + return self.dist.pmf(k,*self.args,**self.kwds) From scipy-svn at scipy.org Sun May 6 22:38:03 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 6 May 2007 21:38:03 -0500 (CDT) Subject: [Scipy-svn] r2969 - trunk/Lib/sandbox/maskedarray Message-ID: <20070507023803.59BBA39C045@new.scipy.org> Author: pierregm Date: 2007-05-06 21:38:00 -0500 (Sun, 06 May 2007) New Revision: 2969 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/extras.py Log: core : force var to return a masked array extras : (flat)notmasked_contiguous: return a sequence of slices instead of the previous less useful (size,(start,end)) extras : dot now accepts 1D arrays. Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-05 21:18:43 UTC (rev 2968) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-07 02:38:00 UTC (rev 2969) @@ -1651,7 +1651,7 @@ cnt = self.count(axis=axis) danom = self.anom(axis=axis, dtype=dtype) danom *= danom - dvar = danom.sum(axis) / cnt + dvar = numeric.array(danom.sum(axis) / cnt).view(type(self)) if axis is not None: dvar._mask = mask_or(self._mask.all(axis), (cnt==1)) return dvar @@ -2634,7 +2634,7 @@ from maskedarray.testutils import assert_equal, assert_array_equal marray = masked_array # - if 1: + if 0: x = masked_array([1,2]) y = x * masked print y @@ -2643,3 +2643,8 @@ y = x + masked assert_equal(y.shape, x.shape) assert_equal(y._mask, [True, True]) + # + if 1: + x = arange(10) + x[0] = masked + print dot(x,x) Modified: trunk/Lib/sandbox/maskedarray/extras.py =================================================================== --- trunk/Lib/sandbox/maskedarray/extras.py 2007-05-05 21:18:43 UTC (rev 2968) +++ trunk/Lib/sandbox/maskedarray/extras.py 2007-05-07 02:38:00 UTC (rev 2969) @@ -435,8 +435,9 @@ NB: The first argument is not conjugated. """ #TODO: Works only with 2D arrays. There should be a way to get it to run with higher dimension - a = mask_rows(a) - b = mask_cols(b) + if (a.ndim == 2) and (b.ndim == 2): + a = mask_rows(a) + b = mask_cols(b) # d = numpy.dot(a.filled(0), b.filled(0)) # @@ -600,7 +601,7 @@ def flatnotmasked_contiguous(a): """Finds contiguous unmasked data in a flattened masked array. - Returns a sorted sequence of tuples (size,(start index, end index)). + Returns a sorted sequence of slices (start index, end index). """ m = getmask(a) if m is nomask: @@ -611,13 +612,14 @@ result = [] for k, group in groupby(enumerate(unmasked), lambda (i,x):i-x): tmp = numpy.fromiter((g[1] for g in group), int_) - result.append((tmp.size, tuple(tmp[[0,-1]]))) +# result.append((tmp.size, tuple(tmp[[0,-1]]))) + result.append( slice(tmp[0],tmp[-1]) ) result.sort() return result def notmasked_contiguous(a, axis=None): """Finds contiguous unmasked data in a masked array along the given axis. - Returns a sorted sequence of tuples (size,(start index, end index)). 
+ Returns a sorted sequence of slices (start index, end index). Note: Only accepts 2D arrays at most. """ a = asarray(a) @@ -638,3 +640,10 @@ result.append( flatnotmasked_contiguous(a[idx]) ) return result +################################################################################ +if __name__ == '__main__': + # + if 1: + x = arange(10) + x[0] = masked + print dot(x,x) \ No newline at end of file From scipy-svn at scipy.org Sun May 6 23:04:31 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 6 May 2007 22:04:31 -0500 (CDT) Subject: [Scipy-svn] r2970 - in trunk/Lib/sandbox/timeseries: . plotlib Message-ID: <20070507030431.6080C39C036@new.scipy.org> Author: pierregm Date: 2007-05-06 22:04:25 -0500 (Sun, 06 May 2007) New Revision: 2970 Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py trunk/Lib/sandbox/timeseries/tseries.py Log: tseries : clean the group_byperiod: missing data are automatically filled : fill_missing_dates returns the same subclass of TimeSeries as the input : _tsaxismethods returns the same subclass of TimeSeries as the input mpl_timeseries : fixed a pb w/ check_params Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-07 02:38:00 UTC (rev 2969) +++ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-07 03:04:25 UTC (rev 2970) @@ -692,17 +692,16 @@ if self.ydata is None: raise ValueError, "No data information available!" # Otherwise.............................. - elif len(remaining) > 0: - if isinstance(remaining[0], str): - b = remaining.pop(0) - if self.xdata is None: - raise ValueError, "No date information available!" - else: - output.extend([self.xdata, a, b]) - elif self.xdata is None: + elif len(remaining) > 0 and isinstance(remaining[0], str): + b = remaining.pop(0) + if self.xdata is None: raise ValueError, "No date information available!" else: - output.extend([self.xdata, a]) + output.extend([self.xdata, a, b]) + elif self.xdata is None: + raise ValueError, "No date information available!" + else: + output.extend([self.xdata, a]) # Reinitialize the plot if needed ........... if self.xdata is None: self.xdata = output[0] Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-07 02:38:00 UTC (rev 2969) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-07 03:04:25 UTC (rev 2970) @@ -296,7 +296,9 @@ try: axis = params.get('axis', args[0]) if axis in [-1, _series.ndim-1]: - result = TimeSeries(result, dates=_dates) + result = result.view(type(self.obj)) + result._dates = _dates +# result = TimeSeries(result, dates=_dates) except IndexError: pass return result @@ -319,9 +321,9 @@ _genattributes = ['fill_value', 'observed'] def __new__(cls, data, dates=None, mask=nomask, freq=None, observed=None, start_date=None, length=None, - dtype=None, copy=False, fill_value=None, + dtype=None, copy=False, fill_value=None, subok=True, keep_mask=True, small_mask=True, hard_mask=False, **options): - maparms = dict(copy=copy, dtype=dtype, fill_value=fill_value, + maparms = dict(copy=copy, dtype=dtype, fill_value=fill_value,subok=subok, keep_mask=keep_mask, small_mask=small_mask, hard_mask=hard_mask,) _data = MaskedArray(data, mask=mask, **maparms) @@ -351,7 +353,8 @@ # Get the data ............................... 
if newdates._unsorted is not None: _data = _data[newdates._unsorted] - _data = _data.view(cls) + if not subok or not isinstance(_data,TimeSeries): + _data = _data.view(cls) if _data is masked: assert(numeric.size(newdates)==1) return _data.view(cls) @@ -1238,10 +1241,16 @@ newseries.copy_attributes(series) return newseries -def group_byperiod(series, freq, func='auto', position='END'): - """Converts a series to a frequency, without any processing. +def group_byperiod(series, freq, position='END'): + """Converts a series to a frequency, without any processing. If the series + has missing data, it is first filled with masked data. Duplicate values in the + series will raise an exception. """ - return convert(series, freq, func=None, position='END') + if series.has_duplicated_dates(): + raise TimeSeriesError("The input series must not have duplicated dates!") + elif series.has_missing_dates(): + series = fill_missing_dates(series) + return convert(series, freq, func=None, position=position) TimeSeries.convert = convert TimeSeries.group_byperiod = group_byperiod @@ -1407,7 +1416,10 @@ nshp = (newdates.size,) else: nshp = tuple([-1,] + list(data.shape[1:])) - return time_series(newdata.reshape(nshp), newdates) + _data = newdata.reshape(nshp).view(type(data)) + _data._dates = newdates + return _data +# return time_series(newdata.reshape(nshp), newdates) #............................................................................... def stack(*series): """performs a column_stack on the data from each series, and the From scipy-svn at scipy.org Mon May 7 12:19:17 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 7 May 2007 11:19:17 -0500 (CDT) Subject: [Scipy-svn] r2971 - in trunk/Lib/special: . cephes specfun Message-ID: <20070507161917.21F1539C01A@new.scipy.org> Author: cookedm Date: 2007-05-07 11:19:10 -0500 (Mon, 07 May 2007) New Revision: 2971 Removed: trunk/Lib/special/cephes/jn.c Modified: trunk/Lib/special/_cephesmodule.c trunk/Lib/special/specfun/specfun.f Log: #398 alias scipy.special.jn to scipy.special.jv, and remove jn code. Cephes' jn code is suboptimal compared with jv. Modified: trunk/Lib/special/_cephesmodule.c =================================================================== --- trunk/Lib/special/_cephesmodule.c 2007-05-07 03:04:25 UTC (rev 2970) +++ trunk/Lib/special/_cephesmodule.c 2007-05-07 16:19:10 UTC (rev 2971) @@ -69,7 +69,6 @@ static void * exp1_data[] = { (void *)exp1_wrap, (void *)exp1_wrap, (void *)cexp1_wrap, (void *)cexp1_wrap,}; static void * expi_data[] = { (void *)expi_wrap, (void *)expi_wrap,}; static void * expn_data[] = { (void *)expn, (void *)expn, }; -static void * jn_data[] = { (void *)jn, (void *)jn, }; static void * kn_data[] = { (void *)kn, (void *)kn, }; static void * pdtrc_data[] = { (void *)pdtrc, (void *)pdtrc, }; @@ -568,10 +567,6 @@ Py_DECREF(f); - - f = PyUFunc_FromFuncAndData(cephes2a_functions, jn_data, cephes_3_types, 2, 2, 1, PyUFunc_None, "jn", jn_doc, 0); - PyDict_SetItemString(dictionary, "jn", f); - Py_DECREF(f); f = PyUFunc_FromFuncAndData(cephes2a_functions, kn_data, cephes_3_types, 2, 2, 1, PyUFunc_None, "kn", kn_doc, 0); PyDict_SetItemString(dictionary, "kn", f); Py_DECREF(f); @@ -656,6 +651,9 @@ f = PyUFunc_FromFuncAndData(cephes2c_functions, jv_data, cephes_3c_types, 4, 2, 1, PyUFunc_None, "jv", jv_doc, 0); PyDict_SetItemString(dictionary, "jv", f); + /* cephes jn doesn't have any advantages over jv, and is less + accurate. 
So we alias jv to jn */ + PyDict_SetItemString(dictionary, "jn", f); Py_DECREF(f); f = PyUFunc_FromFuncAndData(cephes2cp_functions, jve_data, cephes_3c_types, 4, 2, 1, PyUFunc_None, "jve", jve_doc, 0); PyDict_SetItemString(dictionary, "jve", f); Deleted: trunk/Lib/special/cephes/jn.c =================================================================== --- trunk/Lib/special/cephes/jn.c 2007-05-07 03:04:25 UTC (rev 2970) +++ trunk/Lib/special/cephes/jn.c 2007-05-07 16:19:10 UTC (rev 2971) @@ -1,139 +0,0 @@ -/* jn.c - * - * Bessel function of integer order - * - * - * - * SYNOPSIS: - * - * int n; - * double x, y, jn(); - * - * y = jn( n, x ); - * - * - * - * DESCRIPTION: - * - * Returns Bessel function of order n, where n is a - * (possibly negative) integer. - * - * The ratio of jn(x) to j0(x) is computed by backward - * recurrence. First the ratio jn/jn-1 is found by a - * continued fraction expansion. Then the recurrence - * relating successive orders is applied until j0 or j1 is - * reached. - * - * If n = 0 or 1 the routine for j0 or j1 is called - * directly. - * - * - * - * ACCURACY: - * - * Absolute error: - * arithmetic range # trials peak rms - * DEC 0, 30 5500 6.9e-17 9.3e-18 - * IEEE 0, 30 5000 4.4e-16 7.9e-17 - * - * - * Not suitable for large n or x. Use jv() instead. - * - */ - -/* jn.c -Cephes Math Library Release 2.8: June, 2000 -Copyright 1984, 1987, 2000 by Stephen L. Moshier -*/ -#include "mconf.h" -#ifdef ANSIPROT -extern double fabs ( double ); -extern double j0 ( double ); -extern double j1 ( double ); -#else -double fabs(), j0(), j1(); -#endif -extern double MACHEP; - -double jn( n, x ) -int n; -double x; -{ -double pkm2, pkm1, pk, xk, r, ans; -int k, sign; - -if( n < 0 ) - { - n = -n; - if( (n & 1) == 0 ) /* -1**n */ - sign = 1; - else - sign = -1; - } -else - sign = 1; - -if( x < 0.0 ) - { - if( n & 1 ) - sign = -sign; - x = -x; - } - -if( n == 0 ) - return( sign * j0(x) ); -if( n == 1 ) - return( sign * j1(x) ); -if( n == 2 ) { - if (x < 1e-5) { - double y = x*x; - return sign * 0.125 * y * (1 - y / 12.); - } else { - return( sign * (2.0 * j1(x) / x - j0(x)) ); - } -} - -if( x < MACHEP ) - return( 0.0 ); - -/* continued fraction */ -#ifdef DEC -k = 56; -#else -k = 53; -#endif - -pk = 2 * (n + k); -ans = pk; -xk = x * x; - -do - { - pk -= 2.0; - ans = pk - (xk/ans); - } -while( --k > 0 ); -ans = x/ans; - -/* backward recurrence */ - -pk = 1.0; -pkm1 = 1.0/ans; -k = n-1; -r = 2 * k; - -do - { - pkm2 = (pkm1 * r - pk * x) / x; - pk = pkm1; - pkm1 = pkm2; - r -= 2.0; - } -while( --k > 0 ); - -if( fabs(pk) > fabs(pkm1) ) - ans = j1(x)/pk; -else - ans = j0(x)/pkm1; -return( sign * ans ); -} Modified: trunk/Lib/special/specfun/specfun.f =================================================================== --- trunk/Lib/special/specfun/specfun.f 2007-05-07 03:04:25 UTC (rev 2970) +++ trunk/Lib/special/specfun/specfun.f 2007-05-07 16:19:10 UTC (rev 2971) @@ -24,48 +24,48 @@ C Routine called: GAIH for computing ?(x), x=n/2 (n=1,2,...) 
C =========================================================== C - IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) - IMPLICIT COMPLEX*16 (C,Z) - EPS=1.0D-15 - PI=3.141592653589793D0 - SQ2=DSQRT(2.0D0) - CA0=CDEXP(-.25D0*Z*Z) - VA0=0.5D0*(1.0D0-N) - IF (N.EQ.0.0) THEN - CDN=CA0 - ELSE - IF (CDABS(Z).EQ.0.0) THEN - IF (VA0.LE.0.0.AND.VA0.EQ.INT(VA0)) THEN - CDN=0.0D0 - ELSE - CALL GAIH(VA0,GA0) - PD=DSQRT(PI)/(2.0D0**(-.5D0*N)*GA0) - CDN=CMPLX(PD,0.0D0) - ENDIF - ELSE - XN=-N - CALL GAIH(XN,G1) - CB0=2.0D0**(-0.5D0*N-1.0D0)*CA0/G1 - VT=-.5D0*N - CALL GAIH(VT,G0) - CDN=CMPLX(G0,0.0D0) - CR=(1.0D0,0.0D0) - DO 10 M=1,250 - VM=.5D0*(M-N) - CALL GAIH(VM,GM) - CR=-CR*SQ2*Z/M - CDW=GM*CR - CDN=CDN+CDW - IF (CDABS(CDW).LT.CDABS(CDN)*EPS) GO TO 20 + IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) + IMPLICIT COMPLEX*16 (C,Z) + EPS=1.0D-15 + PI=3.141592653589793D0 + SQ2=DSQRT(2.0D0) + CA0=CDEXP(-.25D0*Z*Z) + VA0=0.5D0*(1.0D0-N) + IF (N.EQ.0.0) THEN + CDN=CA0 + ELSE + IF (CDABS(Z).EQ.0.0) THEN + IF (VA0.LE.0.0.AND.VA0.EQ.INT(VA0)) THEN + CDN=0.0D0 + ELSE + CALL GAIH(VA0,GA0) + PD=DSQRT(PI)/(2.0D0**(-.5D0*N)*GA0) + CDN=CMPLX(PD,0.0D0) + ENDIF + ELSE + XN=-N + CALL GAIH(XN,G1) + CB0=2.0D0**(-0.5D0*N-1.0D0)*CA0/G1 + VT=-.5D0*N + CALL GAIH(VT,G0) + CDN=CMPLX(G0,0.0D0) + CR=(1.0D0,0.0D0) + DO 10 M=1,250 + VM=.5D0*(M-N) + CALL GAIH(VM,GM) + CR=-CR*SQ2*Z/M + CDW=GM*CR + CDN=CDN+CDW + IF (CDABS(CDW).LT.CDABS(CDN)*EPS) GO TO 20 10 CONTINUE 20 CDN=CB0*CDN - ENDIF - ENDIF - RETURN - END + ENDIF + ENDIF + RETURN + END - + C ********************************** SUBROUTINE CFS(Z,ZF,ZD) @@ -459,67 +459,67 @@ C functions with a small argument C ===================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION BK(200),CK(200),U(200),V(200),W(200) - EPS=1.0D-14 - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - NM=25+INT(0.5*(N-M)+C) - U(1)=0.0D0 - N2=NM-2 - DO 10 J=2,N2 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION BK(200),CK(200),U(200),V(200),W(200) + EPS=1.0D-14 + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + NM=25+INT(0.5*(N-M)+C) + U(1)=0.0D0 + N2=NM-2 + DO 10 J=2,N2 10 U(J)=C*C - DO 15 J=1,N2 + DO 15 J=1,N2 15 V(J)=(2.0*J-1.0-IP)*(2.0*(J-M)-IP)+M*(M-1.0)-CV - DO 20 J=1,NM-1 + DO 20 J=1,NM-1 20 W(J)=(2.0*J-IP)*(2.0*J+1.0-IP) - IF (IP.EQ.0) THEN + IF (IP.EQ.0) THEN SW=0.0D0 - DO 40 K=0,N2-1 - S1=0.0D0 - I1=K-M+1 - DO 30 I=I1,NM - IF (I.LT.0) GO TO 30 - R1=1.0D0 - DO 25 J=1,K + DO 40 K=0,N2-1 + S1=0.0D0 + I1=K-M+1 + DO 30 I=I1,NM + IF (I.LT.0) GO TO 30 + R1=1.0D0 + DO 25 J=1,K 25 R1=R1*(I+M-J)/J - S1=S1+CK(I+1)*(2.0*I+M)*R1 - IF (DABS(S1-SW).LT.DABS(S1)*EPS) GO TO 35 - SW=S1 + S1=S1+CK(I+1)*(2.0*I+M)*R1 + IF (DABS(S1-SW).LT.DABS(S1)*EPS) GO TO 35 + SW=S1 30 CONTINUE 35 BK(K+1)=QT*S1 40 CONTINUE - ELSE IF (IP.EQ.1) THEN + ELSE IF (IP.EQ.1) THEN SW=0.0D0 - DO 60 K=0,N2-1 - S1=0.0D0 - I1=K-M+1 - DO 50 I=I1,NM - IF (I.LT.0) GO TO 50 - R1=1.0D0 - DO 45 J=1,K + DO 60 K=0,N2-1 + S1=0.0D0 + I1=K-M+1 + DO 50 I=I1,NM + IF (I.LT.0) GO TO 50 + R1=1.0D0 + DO 45 J=1,K 45 R1=R1*(I+M-J)/J - IF (I.GT.0) S1=S1+CK(I)*(2.0*I+M-1)*R1 - S1=S1-CK(I+1)*(2.0*I+M)*R1 - IF (DABS(S1-SW).LT.DABS(S1)*EPS) GO TO 55 - SW=S1 + IF (I.GT.0) S1=S1+CK(I)*(2.0*I+M-1)*R1 + S1=S1-CK(I+1)*(2.0*I+M)*R1 + IF (DABS(S1-SW).LT.DABS(S1)*EPS) GO TO 55 + SW=S1 50 CONTINUE 55 BK(K+1)=QT*S1 60 CONTINUE - ENDIF - W(1)=W(1)/V(1) - BK(1)=BK(1)/V(1) - DO 65 K=2,N2 - T=V(K)-W(K-1)*U(K) - W(K)=W(K)/T + ENDIF + W(1)=W(1)/V(1) + BK(1)=BK(1)/V(1) + DO 65 K=2,N2 + T=V(K)-W(K-1)*U(K) + W(K)=W(K)/T 65 BK(K)=(BK(K)-BK(K-1)*U(K))/T - DO 70 K=N2-1,1,-1 + DO 70 
K=N2-1,1,-1 70 BK(K)=BK(K)-W(K)*BK(K+1) - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE CJY01(Z,CBJ0,CDJ0,CBJ1,CDJ1,CBY0,CDY0,CBY1,CDY1) @@ -677,97 +677,97 @@ C and joining factors C ====================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION PM(0:251),PD(0:251),QM(0:251),QD(0:251), + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION PM(0:251),PD(0:251),QM(0:251),QD(0:251), & DN(200),DF(200) - IF (DABS(DF(1)).LT.1.0D-280) THEN - R2F=1.0D+300 - R2D=1.0D+300 - RETURN - ENDIF - EPS=1.0D-14 - IP=1 - NM1=INT((N-M)/2) - IF (N-M.EQ.2*NM1) IP=0 - NM=25+NM1+INT(C) - NM2=2*NM+M - CALL KMN(M,N,C,CV,KD,DF,DN,CK1,CK2) - CALL LPMNS(M,NM2,X,PM,PD) - CALL LQMNS(M,NM2,X,QM,QD) - SU0=0.0D0 + IF (DABS(DF(1)).LT.1.0D-280) THEN + R2F=1.0D+300 + R2D=1.0D+300 + RETURN + ENDIF + EPS=1.0D-14 + IP=1 + NM1=INT((N-M)/2) + IF (N-M.EQ.2*NM1) IP=0 + NM=25+NM1+INT(C) + NM2=2*NM+M + CALL KMN(M,N,C,CV,KD,DF,DN,CK1,CK2) + CALL LPMNS(M,NM2,X,PM,PD) + CALL LQMNS(M,NM2,X,QM,QD) + SU0=0.0D0 SW=0.0D0 - DO 10 K=1,NM - J=2*K-2+M+IP - SU0=SU0+DF(K)*QM(J) - IF (K.GT.NM1.AND.DABS(SU0-SW).LT.DABS(SU0)*EPS) GO TO 15 + DO 10 K=1,NM + J=2*K-2+M+IP + SU0=SU0+DF(K)*QM(J) + IF (K.GT.NM1.AND.DABS(SU0-SW).LT.DABS(SU0)*EPS) GO TO 15 10 SW=SU0 15 SD0=0.0D0 - DO 20 K=1,NM - J=2*K-2+M+IP - SD0=SD0+DF(K)*QD(J) - IF (K.GT.NM1.AND.DABS(SD0-SW).LT.DABS(SD0)*EPS) GO TO 25 + DO 20 K=1,NM + J=2*K-2+M+IP + SD0=SD0+DF(K)*QD(J) + IF (K.GT.NM1.AND.DABS(SD0-SW).LT.DABS(SD0)*EPS) GO TO 25 20 SW=SD0 25 SU1=0.0D0 - SD1=0.0D0 - DO 30 K=1,M - J=M-2*K+IP - IF (J.LT.0) J=-J-1 - SU1=SU1+DN(K)*QM(J) + SD1=0.0D0 + DO 30 K=1,M + J=M-2*K+IP + IF (J.LT.0) J=-J-1 + SU1=SU1+DN(K)*QM(J) 30 SD1=SD1+DN(K)*QD(J) - GA=((X-1.0D0)/(X+1.0D0))**(0.5D0*M) - DO 55 K=1,M - J=M-2*K+IP - IF (J.GE.0) GO TO 55 - IF (J.LT.0) J=-J-1 - R1=1.0D0 - DO 35 J1=1,J + GA=((X-1.0D0)/(X+1.0D0))**(0.5D0*M) + DO 55 K=1,M + J=M-2*K+IP + IF (J.GE.0) GO TO 55 + IF (J.LT.0) J=-J-1 + R1=1.0D0 + DO 35 J1=1,J 35 R1=(M+J1)*R1 - R2=1.0D0 - DO 40 J2=1,M-J-2 + R2=1.0D0 + DO 40 J2=1,M-J-2 40 R2=J2*R2 - R3=1.0D0 - SF=1.0D0 - DO 45 L1=1,J - R3=0.5D0*R3*(-J+L1-1.0)*(J+L1)/((M+L1)*L1)*(1.0-X) + R3=1.0D0 + SF=1.0D0 + DO 45 L1=1,J + R3=0.5D0*R3*(-J+L1-1.0)*(J+L1)/((M+L1)*L1)*(1.0-X) 45 SF=SF+R3 - IF (M-J.GE.2) GB=(M-J-1.0D0)*R2 - IF (M-J.LE.1) GB=1.0D0 - SPL=R1*GA*GB*SF - SU1=SU1+(-1)**(J+M)*DN(K)*SPL - SPD1=M/(X*X-1.0D0)*SPL - GC=0.5D0*J*(J+1.0)/(M+1.0) - SD=1.0D0 - R4=1.0D0 - DO 50 L1=1,J-1 - R4=0.5D0*R4*(-J+L1)*(J+L1+1.0)/((M+L1+1.0)*L1) + IF (M-J.GE.2) GB=(M-J-1.0D0)*R2 + IF (M-J.LE.1) GB=1.0D0 + SPL=R1*GA*GB*SF + SU1=SU1+(-1)**(J+M)*DN(K)*SPL + SPD1=M/(X*X-1.0D0)*SPL + GC=0.5D0*J*(J+1.0)/(M+1.0) + SD=1.0D0 + R4=1.0D0 + DO 50 L1=1,J-1 + R4=0.5D0*R4*(-J+L1)*(J+L1+1.0)/((M+L1+1.0)*L1) & *(1.0-X) 50 SD=SD+R4 - SPD2=R1*GA*GB*GC*SD - SD1=SD1+(-1)**(J+M)*DN(K)*(SPD1+SPD2) + SPD2=R1*GA*GB*GC*SD + SD1=SD1+(-1)**(J+M)*DN(K)*(SPD1+SPD2) 55 CONTINUE - SU2=0.0D0 - KI=(2*M+1+IP)/2 - NM3=NM+KI - DO 60 K=KI,NM3 - J=2*K-1-M-IP - SU2=SU2+DN(K)*PM(J) - IF (J.GT.M.AND.DABS(SU2-SW).LT.DABS(SU2)*EPS) GO TO 65 + SU2=0.0D0 + KI=(2*M+1+IP)/2 + NM3=NM+KI + DO 60 K=KI,NM3 + J=2*K-1-M-IP + SU2=SU2+DN(K)*PM(J) + IF (J.GT.M.AND.DABS(SU2-SW).LT.DABS(SU2)*EPS) GO TO 65 60 SW=SU2 65 SD2=0.0D0 - DO 70 K=KI,NM3 - J=2*K-1-M-IP - SD2=SD2+DN(K)*PD(J) - IF (J.GT.M.AND.DABS(SD2-SW).LT.DABS(SD2)*EPS) GO TO 75 + DO 70 K=KI,NM3 + J=2*K-1-M-IP + SD2=SD2+DN(K)*PD(J) + IF (J.GT.M.AND.DABS(SD2-SW).LT.DABS(SD2)*EPS) GO TO 75 70 SW=SD2 75 SUM=SU0+SU1+SU2 - SDM=SD0+SD1+SD2 - R2F=SUM/CK2 
- R2D=SDM/CK2 - RETURN - END + SDM=SD0+SD1+SD2 + R2F=SUM/CK2 + R2D=SDM/CK2 + RETURN + END - + C ********************************** SUBROUTINE BERNOB(N,BN) @@ -833,33 +833,33 @@ C with a small argument C ========================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION AP(200),CK(200) - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - R=1.0D0/CK(1)**2 - AP(1)=R - DO 20 I=1,M - S=0.0D0 - DO 15 L=1,I - SK=0.0D0 - DO 10 K=0,L + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION AP(200),CK(200) + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + R=1.0D0/CK(1)**2 + AP(1)=R + DO 20 I=1,M + S=0.0D0 + DO 15 L=1,I + SK=0.0D0 + DO 10 K=0,L 10 SK=SK+CK(K+1)*CK(L-K+1) 15 S=S+SK*AP(I-L+1) 20 AP(I+1)=-R*S - QS0=AP(M+1) - DO 30 L=1,M - R=1.0D0 - DO 25 K=1,L + QS0=AP(M+1) + DO 30 L=1,M + R=1.0D0 + DO 25 K=1,L 25 R=R*(2.0D0*K+IP)*(2.0D0*K-1.0D0+IP)/(2.0D0*K)**2 30 QS0=QS0+AP(M-L+1)*R - QS=(-1)**IP*CK1*(CK1*QS0)/C - QT=-2.0D0/CK1*QS - RETURN - END + QS=(-1)**IP*CK1*(CK1*QS0)/C + QT=-2.0D0/CK1*QS + RETURN + END - + C ********************************** SUBROUTINE CV0(KD,M,Q,A0) @@ -1456,78 +1456,78 @@ C functions of the second kind C ======================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION DF(200),SY(0:251),DY(0:251) - EPS=1.0D-14 - IP=1 - NM1=INT((N-M)/2) - IF (N-M.EQ.2*NM1) IP=0 - NM=25+NM1+INT(C) - REG=1.0D0 - IF (M+NM.GT.80) REG=1.0D-200 - NM2=2*NM+M - CX=C*X - CALL SPHY(NM2,CX,NM2,SY,DY) - R0=REG - DO 10 J=1,2*M+IP + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION DF(200),SY(0:251),DY(0:251) + EPS=1.0D-14 + IP=1 + NM1=INT((N-M)/2) + IF (N-M.EQ.2*NM1) IP=0 + NM=25+NM1+INT(C) + REG=1.0D0 + IF (M+NM.GT.80) REG=1.0D-200 + NM2=2*NM+M + CX=C*X + CALL SPHY(NM2,CX,NM2,SY,DY) + R0=REG + DO 10 J=1,2*M+IP 10 R0=R0*J - R=R0 - SUC=R*DF(1) + R=R0 + SUC=R*DF(1) SW=0.0D0 - DO 15 K=2,NM - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - SUC=SUC+R*DF(K) - IF (K.GT.NM1.AND.DABS(SUC-SW).LT.DABS(SUC)*EPS) GO TO 20 + DO 15 K=2,NM + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + SUC=SUC+R*DF(K) + IF (K.GT.NM1.AND.DABS(SUC-SW).LT.DABS(SUC)*EPS) GO TO 20 15 SW=SUC 20 A0=(1.0D0-KD/(X*X))**(0.5D0*M)/SUC - R2F=0.0D0 + R2F=0.0D0 EPS1=0.0D0 NP=0 - DO 50 K=1,NM - L=2*K+M-N-2+IP + DO 50 K=1,NM + L=2*K+M-N-2+IP LG=1 - IF (L.NE.4*INT(L/4)) LG=-1 - IF (K.EQ.1) THEN - R=R0 - ELSE - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - ENDIF - NP=M+2*K-2+IP - R2F=R2F+LG*R*(DF(K)*SY(NP)) - EPS1=DABS(R2F-SW) - IF (K.GT.NM1.AND.EPS1.LT.DABS(R2F)*EPS) GO TO 55 + IF (L.NE.4*INT(L/4)) LG=-1 + IF (K.EQ.1) THEN + R=R0 + ELSE + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + ENDIF + NP=M+2*K-2+IP + R2F=R2F+LG*R*(DF(K)*SY(NP)) + EPS1=DABS(R2F-SW) + IF (K.GT.NM1.AND.EPS1.LT.DABS(R2F)*EPS) GO TO 55 50 SW=R2F 55 ID1=INT(LOG10(EPS1/DABS(R2F)+EPS)) - R2F=R2F*A0 - IF (NP.GE.NM2) THEN - ID=10 - RETURN - ENDIF - B0=KD*M/X**3.0D0/(1.0-KD/(X*X))*R2F - SUD=0.0D0 + R2F=R2F*A0 + IF (NP.GE.NM2) THEN + ID=10 + RETURN + ENDIF + B0=KD*M/X**3.0D0/(1.0-KD/(X*X))*R2F + SUD=0.0D0 EPS2=0.0D0 - DO 60 K=1,NM - L=2*K+M-N-2+IP - LG=1 - IF (L.NE.4*INT(L/4)) LG=-1 - IF (K.EQ.1) THEN - R=R0 - ELSE - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - ENDIF - NP=M+2*K-2+IP - SUD=SUD+LG*R*(DF(K)*DY(NP)) - EPS2=DABS(SUD-SW) - IF (K.GT.NM1.AND.EPS2.LT.DABS(SUD)*EPS) GO TO 65 + DO 60 K=1,NM + L=2*K+M-N-2+IP + LG=1 + IF (L.NE.4*INT(L/4)) LG=-1 + IF (K.EQ.1) THEN + R=R0 + ELSE + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + ENDIF + NP=M+2*K-2+IP + SUD=SUD+LG*R*(DF(K)*DY(NP)) + 
EPS2=DABS(SUD-SW) + IF (K.GT.NM1.AND.EPS2.LT.DABS(SUD)*EPS) GO TO 65 60 SW=SUD 65 R2D=B0+A0*C*SUD - ID2=INT(LOG10(EPS2/DABS(SUD)+EPS)) - ID=MAX(ID1,ID2) - RETURN - END + ID2=INT(LOG10(EPS2/DABS(SUD)+EPS)) + ID=MAX(ID1,ID2) + RETURN + END - + C ********************************** SUBROUTINE PSI_SPEC(X,PS) @@ -1682,51 +1682,51 @@ C PD(n) --- Pmn'(x) C ======================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION PM(0:N),PD(0:N) - DO 10 K=0,N - PM(K)=0.0D0 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION PM(0:N),PD(0:N) + DO 10 K=0,N + PM(K)=0.0D0 10 PD(K)=0.0D0 - IF (DABS(X).EQ.1.0D0) THEN - DO 15 K=0,N - IF (M.EQ.0) THEN - PM(K)=1.0D0 - PD(K)=0.5D0*K*(K+1.0) - IF (X.LT.0.0) THEN - PM(K)=(-1)**K*PM(K) - PD(K)=(-1)**(K+1)*PD(K) - ENDIF - ELSE IF (M.EQ.1) THEN - PD(K)=1.0D+300 - ELSE IF (M.EQ.2) THEN - PD(K)=-0.25D0*(K+2.0)*(K+1.0)*K*(K-1.0) - IF (X.LT.0.0) PD(K)=(-1)**(K+1)*PD(K) - ENDIF + IF (DABS(X).EQ.1.0D0) THEN + DO 15 K=0,N + IF (M.EQ.0) THEN + PM(K)=1.0D0 + PD(K)=0.5D0*K*(K+1.0) + IF (X.LT.0.0) THEN + PM(K)=(-1)**K*PM(K) + PD(K)=(-1)**(K+1)*PD(K) + ENDIF + ELSE IF (M.EQ.1) THEN + PD(K)=1.0D+300 + ELSE IF (M.EQ.2) THEN + PD(K)=-0.25D0*(K+2.0)*(K+1.0)*K*(K-1.0) + IF (X.LT.0.0) PD(K)=(-1)**(K+1)*PD(K) + ENDIF 15 CONTINUE - RETURN - ENDIF - X0=DABS(1.0D0-X*X) - PM0=1.0D0 - PMK=PM0 - DO 20 K=1,M - PMK=(2.0D0*K-1.0D0)*DSQRT(X0)*PM0 + RETURN + ENDIF + X0=DABS(1.0D0-X*X) + PM0=1.0D0 + PMK=PM0 + DO 20 K=1,M + PMK=(2.0D0*K-1.0D0)*DSQRT(X0)*PM0 20 PM0=PMK - PM1=(2.0D0*M+1.0D0)*X*PM0 - PM(M)=PMK - PM(M+1)=PM1 - DO 25 K=M+2,N - PM2=((2.0D0*K-1.0D0)*X*PM1-(K+M-1.0D0)*PMK)/(K-M) - PM(K)=PM2 - PMK=PM1 + PM1=(2.0D0*M+1.0D0)*X*PM0 + PM(M)=PMK + PM(M+1)=PM1 + DO 25 K=M+2,N + PM2=((2.0D0*K-1.0D0)*X*PM1-(K+M-1.0D0)*PMK)/(K-M) + PM(K)=PM2 + PMK=PM1 25 PM1=PM2 - PD(0)=((1.0D0-M)*PM(1)-X*PM(0))/(X*X-1.0) - DO 30 K=1,N + PD(0)=((1.0D0-M)*PM(1)-X*PM(0))/(X*X-1.0) + DO 30 K=1,N 30 PD(K)=(K*X*PM(K)-(K+M)*PM(K-1))/(X*X-1.0D0) DO 35 K=1,N PM(K)=(-1)**M*PM(K) 35 PD(K)=(-1)**M*PD(K) - RETURN - END + RETURN + END C ********************************** @@ -1830,24 +1830,24 @@ C of the second kind for a small argument C ============================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION DF(200) - KD=1 - CALL SDMN(M,N,C,CV,KD,DF) - IF (KF.NE.2) THEN - CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) - ENDIF - IF (KF.GT.1) THEN - CALL RMN2L(M,N,C,X,DF,KD,R2F,R2D,ID) - IF (ID.GT.-8) THEN - CALL RMN2SP(M,N,C,X,CV,DF,KD,R2F,R2D) - ENDIF - ENDIF - RETURN - END + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION DF(200) + KD=1 + CALL SDMN(M,N,C,CV,KD,DF) + IF (KF.NE.2) THEN + CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) + ENDIF + IF (KF.GT.1) THEN + CALL RMN2L(M,N,C,X,DF,KD,R2F,R2D,ID) + IF (ID.GT.-8) THEN + CALL RMN2SP(M,N,C,X,CV,DF,KD,R2F,R2D) + ENDIF + ENDIF + RETURN + END - + C ********************************** SUBROUTINE JYNDD(N,X,BJN,DJN,FJN,BYN,DYN,FYN) @@ -1922,9 +1922,9 @@ C Output: GA --- ?(x) C ================================================ C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION G(25) - DATA G/1.0D0,0.5772156649015329D0, + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION G(25) + DATA G/1.0D0,0.5772156649015329D0, & -0.6558780715202538D0, -0.420026350340952D-1, & 0.1665386113822915D0, -.421977345555443D-1, & -.96219715278770D-2, .72189432466630D-2, @@ -1936,12 +1936,12 @@ & .1043427D-9, .77823D-11, & -.36968D-11, .51D-12, & -.206D-13, -.54D-14, .14D-14/ - GR=(25) - DO 20 K=24,1,-1 + GR=(25) + DO 20 K=24,1,-1 20 GR=GR*X+G(K) - GA=1.0D0/(GR*X) 
- RETURN - END + GA=1.0D0/(GR*X) + RETURN + END C ********************************** @@ -2475,39 +2475,39 @@ C Output: CER --- erf(z) C ==================================================== C - IMPLICIT COMPLEX *16 (C,Z) - DOUBLE PRECISION A0,PI - A0=CDABS(Z) - C0=CDEXP(-Z*Z) - PI=3.141592653589793D0 - Z1=Z - IF (DBLE(Z).LT.0.0) THEN - Z1=-Z - ENDIF - IF (A0.LE.5.8D0) THEN - CS=Z1 - CR=Z1 - DO 10 K=1,120 - CR=CR*Z1*Z1/(K+0.5D0) - CS=CS+CR - IF (CDABS(CR/CS).LT.1.0D-15) GO TO 15 + IMPLICIT COMPLEX *16 (C,Z) + DOUBLE PRECISION A0,PI + A0=CDABS(Z) + C0=CDEXP(-Z*Z) + PI=3.141592653589793D0 + Z1=Z + IF (DBLE(Z).LT.0.0) THEN + Z1=-Z + ENDIF + IF (A0.LE.5.8D0) THEN + CS=Z1 + CR=Z1 + DO 10 K=1,120 + CR=CR*Z1*Z1/(K+0.5D0) + CS=CS+CR + IF (CDABS(CR/CS).LT.1.0D-15) GO TO 15 10 CONTINUE 15 CER=2.0D0*C0*CS/DSQRT(PI) - ELSE - CL=1.0D0/Z1 - CR=CL - DO 20 K=1,13 - CR=-CR*(K-0.5D0)/(Z1*Z1) - CL=CL+CR - IF (CDABS(CR/CL).LT.1.0D-15) GO TO 25 + ELSE + CL=1.0D0/Z1 + CR=CL + DO 20 K=1,13 + CR=-CR*(K-0.5D0)/(Z1*Z1) + CL=CL+CR + IF (CDABS(CR/CL).LT.1.0D-15) GO TO 25 20 CONTINUE 25 CER=1.0D0-C0*CL/DSQRT(PI) - ENDIF - IF (DBLE(Z).LT.0.0) THEN - CER=-CER - ENDIF - RETURN - END + ENDIF + IF (DBLE(Z).LT.0.0) THEN + CER=-CER + ENDIF + RETURN + END @@ -2909,124 +2909,124 @@ C (2) GAM0 for computing gamma function (|x| ? 1) C ========================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION VL(0:*),DL(0:*) - PI=3.141592653589793D0 - RP2=0.63661977236758D0 - X=DABS(X) - X2=X*X - N=INT(V) - V0=V-N - VM=V - IF (X.LE.12.0D0) THEN - DO 25 K=0,N - VK=V0+K - BK=1.0D0 - R=1.0D0 - DO 10 I=1,50 - R=-0.25D0*R*X2/(I*(I+VK)) - BK=BK+R - IF (DABS(R).LT.DABS(BK)*1.0D-15) GO TO 15 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION VL(0:*),DL(0:*) + PI=3.141592653589793D0 + RP2=0.63661977236758D0 + X=DABS(X) + X2=X*X + N=INT(V) + V0=V-N + VM=V + IF (X.LE.12.0D0) THEN + DO 25 K=0,N + VK=V0+K + BK=1.0D0 + R=1.0D0 + DO 10 I=1,50 + R=-0.25D0*R*X2/(I*(I+VK)) + BK=BK+R + IF (DABS(R).LT.DABS(BK)*1.0D-15) GO TO 15 10 CONTINUE 15 VL(K)=BK - UK=1.0D0 - R=1.0D0 - DO 20 I=1,50 - R=-0.25D0*R*X2/(I*(I+VK+1.0D0)) - UK=UK+R - IF (DABS(R).LT.DABS(UK)*1.0D-15) GO TO 25 + UK=1.0D0 + R=1.0D0 + DO 20 I=1,50 + R=-0.25D0*R*X2/(I*(I+VK+1.0D0)) + UK=UK+R + IF (DABS(R).LT.DABS(UK)*1.0D-15) GO TO 25 20 CONTINUE 25 DL(K)=-0.5D0*X/(VK+1.0D0)*UK - RETURN - ENDIF - K0=11 - IF (X.GE.35.0D0) K0=10 - IF (X.GE.50.0D0) K0=8 + RETURN + ENDIF + K0=11 + IF (X.GE.35.0D0) K0=10 + IF (X.GE.50.0D0) K0=8 BJV0=0.0D0 BJV1=0.0D0 - DO 40 J=0,1 - VV=4.0D0*(J+V0)*(J+V0) - PX=1.0D0 - RP=1.0D0 - DO 30 K=1,K0 - RP=-0.78125D-2*RP*(VV-(4.0*K-3.0)**2.0)*(VV- + DO 40 J=0,1 + VV=4.0D0*(J+V0)*(J+V0) + PX=1.0D0 + RP=1.0D0 + DO 30 K=1,K0 + RP=-0.78125D-2*RP*(VV-(4.0*K-3.0)**2.0)*(VV- & (4.0*K-1.0)**2.0)/(K*(2.0*K-1.0)*X2) 30 PX=PX+RP - QX=1.0D0 - RQ=1.0D0 - DO 35 K=1,K0 - RQ=-0.78125D-2*RQ*(VV-(4.0*K-1.0)**2.0)*(VV- + QX=1.0D0 + RQ=1.0D0 + DO 35 K=1,K0 + RQ=-0.78125D-2*RQ*(VV-(4.0*K-1.0)**2.0)*(VV- & (4.0*K+1.0)**2.0)/(K*(2.0*K+1.0)*X2) 35 QX=QX+RQ - QX=0.125D0*(VV-1.0D0)*QX/X - XK=X-(0.5D0*(J+V0)+0.25D0)*PI - A0=DSQRT(RP2/X) - CK=DCOS(XK) - SK=DSIN(XK) - IF (J.EQ.0) BJV0=A0*(PX*CK-QX*SK) - IF (J.EQ.1) BJV1=A0*(PX*CK-QX*SK) + QX=0.125D0*(VV-1.0D0)*QX/X + XK=X-(0.5D0*(J+V0)+0.25D0)*PI + A0=DSQRT(RP2/X) + CK=DCOS(XK) + SK=DSIN(XK) + IF (J.EQ.0) BJV0=A0*(PX*CK-QX*SK) + IF (J.EQ.1) BJV1=A0*(PX*CK-QX*SK) 40 CONTINUE - IF (V0.EQ.0.0D0) THEN - GA=1.0D0 - ELSE - CALL GAM0(V0,GA) - GA=V0*GA - ENDIF - FAC=(2.0D0/X)**V0*GA - VL(0)=BJV0 - DL(0)=-BJV1+V0/X*BJV0 - 
VL(1)=BJV1 - DL(1)=BJV0-(1.0D0+V0)/X*BJV1 - R0=2.0D0*(1.0D0+V0)/X - IF (N.LE.1) THEN - VL(0)=FAC*VL(0) - DL(0)=FAC*DL(0)-V0/X*VL(0) - VL(1)=FAC*R0*VL(1) - DL(1)=FAC*R0*DL(1)-(1.0D0+V0)/X*VL(1) - RETURN - ENDIF - IF (N.GE.2.AND.N.LE.INT(0.9*X)) THEN - F0=BJV0 - F1=BJV1 - DO 45 K=2,N - F=2.0D0*(K+V0-1.0D0)/X*F1-F0 - F0=F1 - F1=F + IF (V0.EQ.0.0D0) THEN + GA=1.0D0 + ELSE + CALL GAM0(V0,GA) + GA=V0*GA + ENDIF + FAC=(2.0D0/X)**V0*GA + VL(0)=BJV0 + DL(0)=-BJV1+V0/X*BJV0 + VL(1)=BJV1 + DL(1)=BJV0-(1.0D0+V0)/X*BJV1 + R0=2.0D0*(1.0D0+V0)/X + IF (N.LE.1) THEN + VL(0)=FAC*VL(0) + DL(0)=FAC*DL(0)-V0/X*VL(0) + VL(1)=FAC*R0*VL(1) + DL(1)=FAC*R0*DL(1)-(1.0D0+V0)/X*VL(1) + RETURN + ENDIF + IF (N.GE.2.AND.N.LE.INT(0.9*X)) THEN + F0=BJV0 + F1=BJV1 + DO 45 K=2,N + F=2.0D0*(K+V0-1.0D0)/X*F1-F0 + F0=F1 + F1=F 45 VL(K)=F - ELSE IF (N.GE.2) THEN - M=MSTA1(X,200) - IF (M.LT.N) THEN - N=M - ELSE - M=MSTA2(X,N,15) - ENDIF + ELSE IF (N.GE.2) THEN + M=MSTA1(X,200) + IF (M.LT.N) THEN + N=M + ELSE + M=MSTA2(X,N,15) + ENDIF F=0.0D0 - F2=0.0D0 - F1=1.0D-100 - DO 50 K=M,0,-1 - F=2.0D0*(V0+K+1.0D0)/X*F1-F2 - IF (K.LE.N) VL(K)=F - F2=F1 + F2=0.0D0 + F1=1.0D-100 + DO 50 K=M,0,-1 + F=2.0D0*(V0+K+1.0D0)/X*F1-F2 + IF (K.LE.N) VL(K)=F + F2=F1 50 F1=F CS=0.0D0 - IF (DABS(BJV0).GT.DABS(BJV1)) CS=BJV0/F - ELSE CS=BJV1/F2 - DO 55 K=0,N + IF (DABS(BJV0).GT.DABS(BJV1)) CS=BJV0/F + ELSE CS=BJV1/F2 + DO 55 K=0,N 55 VL(K)=CS*VL(K) - ENDIF - VL(0)=FAC*VL(0) - DO 65 J=1,N - RC=FAC*R0 - VL(J)=RC*VL(J) - DL(J-1)=-0.5D0*X/(J+V0)*VL(J) + ENDIF + VL(0)=FAC*VL(0) + DO 65 J=1,N + RC=FAC*R0 + VL(J)=RC*VL(J) + DL(J-1)=-0.5D0*X/(J+V0)*VL(J) 65 R0=2.0D0*(J+V0+1)/X*R0 - DL(N)=2.0D0*(V0+N)*(VL(N-1)-VL(N))/X - VM=N+V0 - RETURN - END + DL(N)=2.0D0*(V0+N)*(VL(N-1)-VL(N))/X + VM=N+V0 + RETURN + END - + C ********************************** SUBROUTINE CHGUIT(A,B,X,HU,ID) @@ -3139,84 +3139,84 @@ C and joining factors C =================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION U(200),V(200),W(200),DF(200),DN(200), + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION U(200),V(200),W(200),DF(200),DN(200), & TP(200),RK(200) - NM=25+INT(0.5*(N-M)+C) - NN=NM+M - CS=C*C*KD - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + NM=25+INT(0.5*(N-M)+C) + NN=NM+M + CS=C*C*KD + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 K=0 - DO 10 I=1,NN+3 - IF (IP.EQ.0) K=-2*(I-1) - IF (IP.EQ.1) K=-(2*I-3) - GK0=2.0D0*M+K - GK1=(M+K)*(M+K+1.0D0) - GK2=2.0D0*(M+K)-1.0D0 - GK3=2.0D0*(M+K)+3.0D0 - U(I)=GK0*(GK0-1.0D0)*CS/(GK2*(GK2+2.0D0)) - V(I)=GK1-CV+(2.0D0*(GK1-M*M)-1.0D0)*CS/(GK2*GK3) + DO 10 I=1,NN+3 + IF (IP.EQ.0) K=-2*(I-1) + IF (IP.EQ.1) K=-(2*I-3) + GK0=2.0D0*M+K + GK1=(M+K)*(M+K+1.0D0) + GK2=2.0D0*(M+K)-1.0D0 + GK3=2.0D0*(M+K)+3.0D0 + U(I)=GK0*(GK0-1.0D0)*CS/(GK2*(GK2+2.0D0)) + V(I)=GK1-CV+(2.0D0*(GK1-M*M)-1.0D0)*CS/(GK2*GK3) 10 W(I)=(K+1.0D0)*(K+2.0D0)*CS/((GK2+2.0D0)*GK3) - DO 20 K=1,M - T=V(M+1) - DO 15 L=0,M-K-1 + DO 20 K=1,M + T=V(M+1) + DO 15 L=0,M-K-1 15 T=V(M-L)-W(M-L+1)*U(M-L)/T 20 RK(K)=-U(K)/T - R=1.0D0 - DO 25 K=1,M - R=R*RK(K) + R=1.0D0 + DO 25 K=1,M + R=R*RK(K) 25 DN(K)=DF(1)*R - TP(NN)=V(NN+1) - DO 30 K=NN-1,M+1,-1 - TP(K)=V(K+1)-W(K+2)*U(K+1)/TP(K+1) - IF (K.GT.M+1) RK(K)=-U(K)/TP(K) + TP(NN)=V(NN+1) + DO 30 K=NN-1,M+1,-1 + TP(K)=V(K+1)-W(K+2)*U(K+1)/TP(K+1) + IF (K.GT.M+1) RK(K)=-U(K)/TP(K) 30 CONTINUE - IF (M.EQ.0) DNP=DF(1) - IF (M.NE.0) DNP=DN(M) - DN(M+1)=(-1)**IP*DNP*CS/((2.0*M-1.0)*(2.0*M+1.0-4.0*IP) + IF (M.EQ.0) DNP=DF(1) + IF (M.NE.0) DNP=DN(M) + DN(M+1)=(-1)**IP*DNP*CS/((2.0*M-1.0)*(2.0*M+1.0-4.0*IP) & *TP(M+1)) - DO 35 
K=M+2,NN + DO 35 K=M+2,NN 35 DN(K)=RK(K)*DN(K-1) - R1=1.0D0 - DO 40 J=1,(N+M+IP)/2 + R1=1.0D0 + DO 40 J=1,(N+M+IP)/2 40 R1=R1*(J+0.5D0*(N+M+IP)) - NM1=(N-M)/2 - R=1.0D0 - DO 45 J=1,2*M+IP + NM1=(N-M)/2 + R=1.0D0 + DO 45 J=1,2*M+IP 45 R=R*J - SU0=R*DF(1) + SU0=R*DF(1) SW=0.0D0 - DO 50 K=2,NM - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - SU0=SU0+R*DF(K) - IF (K.GT.NM1.AND.DABS((SU0-SW)/SU0).LT.1.0D-14) GO TO 55 + DO 50 K=2,NM + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + SU0=SU0+R*DF(K) + IF (K.GT.NM1.AND.DABS((SU0-SW)/SU0).LT.1.0D-14) GO TO 55 50 SW=SU0 55 IF (KD.EQ.1) GOTO 70 - R2=1.0D0 - DO 60 J=1,M + R2=1.0D0 + DO 60 J=1,M 60 R2=2.0D0*C*R2*J - R3=1.0D0 - DO 65 J=1,(N-M-IP)/2 + R3=1.0D0 + DO 65 J=1,(N-M-IP)/2 65 R3=R3*J - SA0=(2.0*(M+IP)+1.0)*R1/(2.0**N*C**IP*R2*R3*DF(1)) - CK1=SA0*SU0 - IF (KD.EQ.-1) RETURN + SA0=(2.0*(M+IP)+1.0)*R1/(2.0**N*C**IP*R2*R3*DF(1)) + CK1=SA0*SU0 + IF (KD.EQ.-1) RETURN 70 R4=1.0D0 - DO 75 J=1,(N-M-IP)/2 + DO 75 J=1,(N-M-IP)/2 75 R4=4.0D0*R4*J - R5=1.0D0 - DO 80 J=1,M + R5=1.0D0 + DO 80 J=1,M 80 R5=R5*(J+M)/C - G0=DN(M) - IF (M.EQ.0) G0=DF(1) - SB0=(IP+1.0)*C**(IP+1)/(2.0*IP*(M-2.0)+1.0)/(2.0*M-1.0) - CK2=(-1)**IP*SB0*R4*R5*G0/R1*SU0 - RETURN - END + G0=DN(M) + IF (M.EQ.0) G0=DF(1) + SB0=(IP+1.0)*C**(IP+1)/(2.0*IP*(M-2.0)+1.0)/(2.0*M-1.0) + CK2=(-1)**IP*SB0*R4*R5*G0/R1*SU0 + RETURN + END - + C ********************************** SUBROUTINE LAGZO(N,X,W) @@ -4257,104 +4257,104 @@ C point for backward recurrence C ===================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION BJ(0:N),DJ(0:N),BY(0:N),DY(0:N), + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION BJ(0:N),DJ(0:N),BY(0:N),DY(0:N), & A(4),B(4),A1(4),B1(4) - PI=3.141592653589793D0 - R2P=.63661977236758D0 - NM=N - IF (X.LT.1.0D-100) THEN - DO 10 K=0,N - BJ(K)=0.0D0 - DJ(K)=0.0D0 - BY(K)=-1.0D+300 + PI=3.141592653589793D0 + R2P=.63661977236758D0 + NM=N + IF (X.LT.1.0D-100) THEN + DO 10 K=0,N + BJ(K)=0.0D0 + DJ(K)=0.0D0 + BY(K)=-1.0D+300 10 DY(K)=1.0D+300 - BJ(0)=1.0D0 - DJ(1)=0.5D0 - RETURN - ENDIF - IF (X.LE.300.0.OR.N.GT.INT(0.9*X)) THEN - IF (N.EQ.0) NM=1 - M=MSTA1(X,200) - IF (M.LT.NM) THEN - NM=M - ELSE - M=MSTA2(X,NM,15) - ENDIF - BS=0.0D0 - SU=0.0D0 - SV=0.0D0 - F2=0.0D0 - F1=1.0D-100 + BJ(0)=1.0D0 + DJ(1)=0.5D0 + RETURN + ENDIF + IF (X.LE.300.0.OR.N.GT.INT(0.9*X)) THEN + IF (N.EQ.0) NM=1 + M=MSTA1(X,200) + IF (M.LT.NM) THEN + NM=M + ELSE + M=MSTA2(X,NM,15) + ENDIF + BS=0.0D0 + SU=0.0D0 + SV=0.0D0 + F2=0.0D0 + F1=1.0D-100 F=0.0D0 - DO 15 K=M,0,-1 - F=2.0D0*(K+1.0D0)/X*F1-F2 - IF (K.LE.NM) BJ(K)=F - IF (K.EQ.2*INT(K/2).AND.K.NE.0) THEN - BS=BS+2.0D0*F - SU=SU+(-1)**(K/2)*F/K - ELSE IF (K.GT.1) THEN - SV=SV+(-1)**(K/2)*K/(K*K-1.0)*F - ENDIF - F2=F1 + DO 15 K=M,0,-1 + F=2.0D0*(K+1.0D0)/X*F1-F2 + IF (K.LE.NM) BJ(K)=F + IF (K.EQ.2*INT(K/2).AND.K.NE.0) THEN + BS=BS+2.0D0*F + SU=SU+(-1)**(K/2)*F/K + ELSE IF (K.GT.1) THEN + SV=SV+(-1)**(K/2)*K/(K*K-1.0)*F + ENDIF + F2=F1 15 F1=F - S0=BS+F - DO 20 K=0,NM + S0=BS+F + DO 20 K=0,NM 20 BJ(K)=BJ(K)/S0 - EC=DLOG(X/2.0D0)+0.5772156649015329D0 - BY0=R2P*(EC*BJ(0)-4.0D0*SU/S0) - BY(0)=BY0 - BY1=R2P*((EC-1.0D0)*BJ(1)-BJ(0)/X-4.0D0*SV/S0) - BY(1)=BY1 - ELSE - DATA A/-.7031250000000000D-01,.1121520996093750D+00, + EC=DLOG(X/2.0D0)+0.5772156649015329D0 + BY0=R2P*(EC*BJ(0)-4.0D0*SU/S0) + BY(0)=BY0 + BY1=R2P*((EC-1.0D0)*BJ(1)-BJ(0)/X-4.0D0*SV/S0) + BY(1)=BY1 + ELSE + DATA A/-.7031250000000000D-01,.1121520996093750D+00, & -.5725014209747314D+00,.6074042001273483D+01/ - DATA B/ 
.7324218750000000D-01,-.2271080017089844D+00, + DATA B/ .7324218750000000D-01,-.2271080017089844D+00, & .1727727502584457D+01,-.2438052969955606D+02/ - DATA A1/.1171875000000000D+00,-.1441955566406250D+00, + DATA A1/.1171875000000000D+00,-.1441955566406250D+00, & .6765925884246826D+00,-.6883914268109947D+01/ - DATA B1/-.1025390625000000D+00,.2775764465332031D+00, + DATA B1/-.1025390625000000D+00,.2775764465332031D+00, & -.1993531733751297D+01,.2724882731126854D+02/ - T1=X-0.25D0*PI - P0=1.0D0 - Q0=-0.125D0/X - DO 25 K=1,4 - P0=P0+A(K)*X**(-2*K) + T1=X-0.25D0*PI + P0=1.0D0 + Q0=-0.125D0/X + DO 25 K=1,4 + P0=P0+A(K)*X**(-2*K) 25 Q0=Q0+B(K)*X**(-2*K-1) - CU=DSQRT(R2P/X) - BJ0=CU*(P0*DCOS(T1)-Q0*DSIN(T1)) - BY0=CU*(P0*DSIN(T1)+Q0*DCOS(T1)) - BJ(0)=BJ0 - BY(0)=BY0 - T2=X-0.75D0*PI - P1=1.0D0 - Q1=0.375D0/X - DO 30 K=1,4 - P1=P1+A1(K)*X**(-2*K) + CU=DSQRT(R2P/X) + BJ0=CU*(P0*DCOS(T1)-Q0*DSIN(T1)) + BY0=CU*(P0*DSIN(T1)+Q0*DCOS(T1)) + BJ(0)=BJ0 + BY(0)=BY0 + T2=X-0.75D0*PI + P1=1.0D0 + Q1=0.375D0/X + DO 30 K=1,4 + P1=P1+A1(K)*X**(-2*K) 30 Q1=Q1+B1(K)*X**(-2*K-1) - BJ1=CU*(P1*DCOS(T2)-Q1*DSIN(T2)) - BY1=CU*(P1*DSIN(T2)+Q1*DCOS(T2)) - BJ(1)=BJ1 - BY(1)=BY1 - DO 35 K=2,NM - BJK=2.0D0*(K-1.0D0)/X*BJ1-BJ0 - BJ(K)=BJK - BJ0=BJ1 + BJ1=CU*(P1*DCOS(T2)-Q1*DSIN(T2)) + BY1=CU*(P1*DSIN(T2)+Q1*DCOS(T2)) + BJ(1)=BJ1 + BY(1)=BY1 + DO 35 K=2,NM + BJK=2.0D0*(K-1.0D0)/X*BJ1-BJ0 + BJ(K)=BJK + BJ0=BJ1 35 BJ1=BJK - ENDIF - DJ(0)=-BJ(1) - DO 40 K=1,NM + ENDIF + DJ(0)=-BJ(1) + DO 40 K=1,NM 40 DJ(K)=BJ(K-1)-K/X*BJ(K) - DO 45 K=2,NM - BYK=2.0D0*(K-1.0D0)*BY1/X-BY0 - BY(K)=BYK - BY0=BY1 + DO 45 K=2,NM + BYK=2.0D0*(K-1.0D0)*BY1/X-BY0 + BY(K)=BYK + BY0=BY1 45 BY1=BYK - DY(0)=-BY(1) - DO 50 K=1,NM + DY(0)=-BY(1) + DO 50 K=1,NM 50 DY(K)=BY(K-1)-K*BY(K)/X - RETURN - END + RETURN + END C ********************************** @@ -4474,54 +4474,54 @@ C SCKB for computing expansion coefficients ck C =========================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION CK(200),DF(200) - EPS=1.0D-14 - X0=X - X=DABS(X) - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - NM=40+INT((N-M)/2+C) - NM2=NM/2-2 - CALL SDMN(M,N,C,CV,KD,DF) - CALL SCKB(M,N,C,DF,CK) - X1=1.0D0-X*X - IF (M.EQ.0.AND.X1.EQ.0.0D0) THEN - A0=1.0D0 - ELSE - A0=X1**(0.5D0*M) - ENDIF - SU1=CK(1) - DO 10 K=1,NM2 - R=CK(K+1)*X1**K - SU1=SU1+R - IF (K.GE.10.AND.DABS(R/SU1).LT.EPS) GO TO 15 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION CK(200),DF(200) + EPS=1.0D-14 + X0=X + X=DABS(X) + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + NM=40+INT((N-M)/2+C) + NM2=NM/2-2 + CALL SDMN(M,N,C,CV,KD,DF) + CALL SCKB(M,N,C,DF,CK) + X1=1.0D0-X*X + IF (M.EQ.0.AND.X1.EQ.0.0D0) THEN + A0=1.0D0 + ELSE + A0=X1**(0.5D0*M) + ENDIF + SU1=CK(1) + DO 10 K=1,NM2 + R=CK(K+1)*X1**K + SU1=SU1+R + IF (K.GE.10.AND.DABS(R/SU1).LT.EPS) GO TO 15 10 CONTINUE 15 S1F=A0*X**IP*SU1 - IF (X.EQ.1.0D0) THEN - IF (M.EQ.0) S1D=IP*CK(1)-2.0D0*CK(2) - IF (M.EQ.1) S1D=-1.0D+100 - IF (M.EQ.2) S1D=-2.0D0*CK(1) - IF (M.GE.3) S1D=0.0D0 - ELSE - D0=IP-M/X1*X**(IP+1.0D0) - D1=-2.0D0*A0*X**(IP+1.0D0) - SU2=CK(2) - DO 20 K=2,NM2 - R=K*CK(K+1)*X1**(K-1.0D0) - SU2=SU2+R - IF (K.GE.10.AND.DABS(R/SU2).LT.EPS) GO TO 25 + IF (X.EQ.1.0D0) THEN + IF (M.EQ.0) S1D=IP*CK(1)-2.0D0*CK(2) + IF (M.EQ.1) S1D=-1.0D+100 + IF (M.EQ.2) S1D=-2.0D0*CK(1) + IF (M.GE.3) S1D=0.0D0 + ELSE + D0=IP-M/X1*X**(IP+1.0D0) + D1=-2.0D0*A0*X**(IP+1.0D0) + SU2=CK(2) + DO 20 K=2,NM2 + R=K*CK(K+1)*X1**(K-1.0D0) + SU2=SU2+R + IF (K.GE.10.AND.DABS(R/SU2).LT.EPS) GO TO 25 20 CONTINUE 25 S1D=D0*A0*SU1+D1*SU2 - ENDIF - IF 
(X0.LT.0.0D0.AND.IP.EQ.0) S1D=-S1D - IF (X0.LT.0.0D0.AND.IP.EQ.1) S1F=-S1F - X=X0 - RETURN - END + ENDIF + IF (X0.LT.0.0D0.AND.IP.EQ.0) S1D=-S1D + IF (X0.LT.0.0D0.AND.IP.EQ.1) S1F=-S1F + X=X0 + RETURN + END - + C ********************************** SUBROUTINE JYNA(N,X,NM,BJ,DJ,BY,DY) @@ -7478,83 +7478,83 @@ C c0, c2,... C ====================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION CK(200) - IF (C.LE.1.0D-10) C=1.0D-10 - NM=25+INT((N-M)/2+C) - CS=C*C*KD - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - FS=1.0D0 - F1=0.0D0 - F0=1.0D-100 - KB=0 - CK(NM+1)=0.0D0 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION CK(200) + IF (C.LE.1.0D-10) C=1.0D-10 + NM=25+INT((N-M)/2+C) + CS=C*C*KD + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + FS=1.0D0 + F1=0.0D0 + F0=1.0D-100 + KB=0 + CK(NM+1)=0.0D0 FL=0.0D0 - DO 15 K=NM,1,-1 - F=(((2.0D0*K+M+IP)*(2.0D0*K+M+1.0D0+IP)-CV+CS)*F0 + DO 15 K=NM,1,-1 + F=(((2.0D0*K+M+IP)*(2.0D0*K+M+1.0D0+IP)-CV+CS)*F0 & -4.0D0*(K+1.0D0)*(K+M+1.0D0)*F1)/CS - IF (DABS(F).GT.DABS(CK(K+1))) THEN - CK(K)=F - F1=F0 - F0=F - IF (DABS(F).GT.1.0D+100) THEN - DO 5 K1=NM,K,-1 + IF (DABS(F).GT.DABS(CK(K+1))) THEN + CK(K)=F + F1=F0 + F0=F + IF (DABS(F).GT.1.0D+100) THEN + DO 5 K1=NM,K,-1 5 CK(K1)=CK(K1)*1.0D-100 - F1=F1*1.0D-100 - F0=F0*1.0D-100 - ENDIF - ELSE - KB=K - FL=CK(K+1) - F1=1.0D0 - F2=0.25D0*((M+IP)*(M+IP+1.0)-CV+CS)/(M+1.0)*F1 - CK(1)=F1 - IF (KB.EQ.1) THEN - FS=F2 - ELSE IF (KB.EQ.2) THEN - CK(2)=F2 - FS=0.125D0*(((M+IP+2.0)*(M+IP+3.0)-CV+CS)*F2 + F1=F1*1.0D-100 + F0=F0*1.0D-100 + ENDIF + ELSE + KB=K + FL=CK(K+1) + F1=1.0D0 + F2=0.25D0*((M+IP)*(M+IP+1.0)-CV+CS)/(M+1.0)*F1 + CK(1)=F1 + IF (KB.EQ.1) THEN + FS=F2 + ELSE IF (KB.EQ.2) THEN + CK(2)=F2 + FS=0.125D0*(((M+IP+2.0)*(M+IP+3.0)-CV+CS)*F2 & -CS*F1)/(M+2.0) - ELSE - CK(2)=F2 - DO 10 J=3,KB+1 - F=0.25D0*(((2.0*J+M+IP-4.0)*(2.0*J+M+IP- + ELSE + CK(2)=F2 + DO 10 J=3,KB+1 + F=0.25D0*(((2.0*J+M+IP-4.0)*(2.0*J+M+IP- & 3.0)-CV+CS)*F2-CS*F1)/((J-1.0)*(J+M-1.0)) - IF (J.LE.KB) CK(J)=F - F1=F2 + IF (J.LE.KB) CK(J)=F + F1=F2 10 F2=F - FS=F - ENDIF - GO TO 20 - ENDIF + FS=F + ENDIF + GO TO 20 + ENDIF 15 CONTINUE 20 SU1=0.0D0 - DO 25 K=1,KB + DO 25 K=1,KB 25 SU1=SU1+CK(K) - SU2=0.0D0 - DO 30 K=KB+1,NM + SU2=0.0D0 + DO 30 K=KB+1,NM 30 SU2=SU2+CK(K) - R1=1.0D0 - DO 35 J=1,(N+M+IP)/2 + R1=1.0D0 + DO 35 J=1,(N+M+IP)/2 35 R1=R1*(J+0.5D0*(N+M+IP)) - R2=1.0D0 - DO 40 J=1,(N-M-IP)/2 + R2=1.0D0 + DO 40 J=1,(N-M-IP)/2 40 R2=-R2*J - IF (KB.EQ.0) THEN - S0=R1/(2.0D0**N*R2*SU2) - ELSE - S0=R1/(2.0D0**N*R2*(FL/FS*SU1+SU2)) - ENDIF - DO 45 K=1,KB + IF (KB.EQ.0) THEN + S0=R1/(2.0D0**N*R2*SU2) + ELSE + S0=R1/(2.0D0**N*R2*(FL/FS*SU1+SU2)) + ENDIF + DO 45 K=1,KB 45 CK(K)=FL/FS*S0*CK(K) - DO 50 K=KB+1,NM + DO 50 K=KB+1,NM 50 CK(K)=S0*CK(K) - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE SCKB(M,N,C,DF,CK) @@ -7571,43 +7571,43 @@ C c0, c2, ... 
C ====================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION DF(200),CK(200) - IF (C.LE.1.0D-10) C=1.0D-10 - NM=25+INT(0.5*(N-M)+C) - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - REG=1.0D0 - IF (M+NM.GT.80) REG=1.0D-200 - FAC=-0.5D0**M + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION DF(200),CK(200) + IF (C.LE.1.0D-10) C=1.0D-10 + NM=25+INT(0.5*(N-M)+C) + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + REG=1.0D0 + IF (M+NM.GT.80) REG=1.0D-200 + FAC=-0.5D0**M SW=0.0D0 - DO 35 K=0,NM-1 - FAC=-FAC - I1=2*K+IP+1 - R=REG - DO 10 I=I1,I1+2*M-1 + DO 35 K=0,NM-1 + FAC=-FAC + I1=2*K+IP+1 + R=REG + DO 10 I=I1,I1+2*M-1 10 R=R*I - I2=K+M+IP - DO 15 I=I2,I2+K-1 + I2=K+M+IP + DO 15 I=I2,I2+K-1 15 R=R*(I+0.5D0) - SUM=R*DF(K+1) - DO 20 I=K+1,NM - D1=2.0D0*I+IP - D2=2.0D0*M+D1 - D3=I+M+IP-0.5D0 - R=R*D2*(D2-1.0D0)*I*(D3+K)/(D1*(D1-1.0D0)*(I-K)*D3) - SUM=SUM+R*DF(I+1) - IF (DABS(SW-SUM).LT.DABS(SUM)*1.0D-14) GOTO 25 + SUM=R*DF(K+1) + DO 20 I=K+1,NM + D1=2.0D0*I+IP + D2=2.0D0*M+D1 + D3=I+M+IP-0.5D0 + R=R*D2*(D2-1.0D0)*I*(D3+K)/(D1*(D1-1.0D0)*(I-K)*D3) + SUM=SUM+R*DF(I+1) + IF (DABS(SW-SUM).LT.DABS(SUM)*1.0D-14) GOTO 25 20 SW=SUM 25 R1=REG - DO 30 I=2,M+K + DO 30 I=2,M+K 30 R1=R1*I 35 CK(K+1)=FAC*SUM/R1 - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE CPDLA(N,Z,CDN) @@ -7620,22 +7620,22 @@ C Output: CDN --- Dn(z) C =========================================================== C - IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) - IMPLICIT COMPLEX*16 (C,Z) - CB0=Z**N*CDEXP(-.25D0*Z*Z) - CR=(1.0D0,0.0D0) - CDN=(1.0D0,0.0D0) - DO 10 K=1,16 - CR=-0.5D0*CR*(2.0*K-N-1.0)*(2.0*K-N-2.0)/(K*Z*Z) - CDN=CDN+CR - IF (CDABS(CR).LT.CDABS(CDN)*1.0D-12) GO TO 15 + IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) + IMPLICIT COMPLEX*16 (C,Z) + CB0=Z**N*CDEXP(-.25D0*Z*Z) + CR=(1.0D0,0.0D0) + CDN=(1.0D0,0.0D0) + DO 10 K=1,16 + CR=-0.5D0*CR*(2.0*K-N-1.0)*(2.0*K-N-2.0)/(K*Z*Z) + CDN=CDN+CR + IF (CDABS(CR).LT.CDABS(CDN)*1.0D-12) GO TO 15 10 CONTINUE 15 CDN=CB0*CDN - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE FCSZO(KF,NT,ZO) @@ -7925,35 +7925,35 @@ C of the first kind Pmn(x) C =========================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION DF(200),PM(0:251),PD(0:251) - EPS=1.0D-14 - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - NM=25+INT((N-M)/2+C) - NM2=2*NM+M - CALL SDMN(M,N,C,CV,KD,DF) - CALL LPMNS(M,NM2,X,PM,PD) + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION DF(200),PM(0:251),PD(0:251) + EPS=1.0D-14 + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + NM=25+INT((N-M)/2+C) + NM2=2*NM+M + CALL SDMN(M,N,C,CV,KD,DF) + CALL LPMNS(M,NM2,X,PM,PD) SW=0.0D0 - SU1=0.0D0 - DO 10 K=1,NM - MK=M+2*(K-1)+IP - SU1=SU1+DF(K)*PM(MK) - IF (DABS(SW-SU1).LT.DABS(SU1)*EPS) GOTO 15 + SU1=0.0D0 + DO 10 K=1,NM + MK=M+2*(K-1)+IP + SU1=SU1+DF(K)*PM(MK) + IF (DABS(SW-SU1).LT.DABS(SU1)*EPS) GOTO 15 10 SW=SU1 15 S1F=(-1)**M*SU1 - SU1=0.0D0 - DO 20 K=1,NM - MK=M+2*(K-1)+IP - SU1=SU1+DF(K)*PD(MK) - IF (DABS(SW-SU1).LT.DABS(SU1)*EPS) GOTO 25 + SU1=0.0D0 + DO 20 K=1,NM + MK=M+2*(K-1)+IP + SU1=SU1+DF(K)*PD(MK) + IF (DABS(SW-SU1).LT.DABS(SU1)*EPS) GOTO 25 20 SW=SU1 25 S1D=(-1)**M*SU1 - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE CHGUS(A,B,X,HU,ID) @@ -8288,84 +8288,84 @@ C (2) CPDLA for computing Dn(z) for a large |z| C ================================================== C - IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) - IMPLICIT COMPLEX*16 (C,Z) - DIMENSION CPB(0:*),CPD(0:*) - PI=3.141592653589793D0 - X=DBLE(Z) 
- A0=CDABS(Z) - C0=(0.0D0,0.0D0) - CA0=CDEXP(-0.25D0*Z*Z) + IMPLICIT DOUBLE PRECISION (A-B,D-H,O-Y) + IMPLICIT COMPLEX*16 (C,Z) + DIMENSION CPB(0:*),CPD(0:*) + PI=3.141592653589793D0 + X=DBLE(Z) + A0=CDABS(Z) + C0=(0.0D0,0.0D0) + CA0=CDEXP(-0.25D0*Z*Z) N0=0 - IF (N.GE.0) THEN - CF0=CA0 - CF1=Z*CA0 - CPB(0)=CF0 - CPB(1)=CF1 - DO 10 K=2,N - CF=Z*CF1-(K-1.0D0)*CF0 - CPB(K)=CF - CF0=CF1 + IF (N.GE.0) THEN + CF0=CA0 + CF1=Z*CA0 + CPB(0)=CF0 + CPB(1)=CF1 + DO 10 K=2,N + CF=Z*CF1-(K-1.0D0)*CF0 + CPB(K)=CF + CF0=CF1 10 CF1=CF - ELSE - N0=-N - IF (X.LE.0.0.OR.CDABS(Z).EQ.0.0) THEN - CF0=CA0 - CPB(0)=CF0 - Z1=-Z - IF (A0.LE.7.0) THEN - CALL CPDSA(-1,Z1,CF1) - ELSE - CALL CPDLA(-1,Z1,CF1) - ENDIF - CF1=DSQRT(2.0D0*PI)/CA0-CF1 - CPB(1)=CF1 - DO 15 K=2,N0 - CF=(-Z*CF1+CF0)/(K-1.0D0) - CPB(K)=CF - CF0=CF1 + ELSE + N0=-N + IF (X.LE.0.0.OR.CDABS(Z).EQ.0.0) THEN + CF0=CA0 + CPB(0)=CF0 + Z1=-Z + IF (A0.LE.7.0) THEN + CALL CPDSA(-1,Z1,CF1) + ELSE + CALL CPDLA(-1,Z1,CF1) + ENDIF + CF1=DSQRT(2.0D0*PI)/CA0-CF1 + CPB(1)=CF1 + DO 15 K=2,N0 + CF=(-Z*CF1+CF0)/(K-1.0D0) + CPB(K)=CF + CF0=CF1 15 CF1=CF - ELSE - IF (A0.LE.3.0) THEN - CALL CPDSA(-N0,Z,CFA) - CPB(N0)=CFA - N1=N0+1 - CALL CPDSA(-N1,Z,CFB) - CPB(N1)=CFB - NM1=N0-1 - DO 20 K=NM1,0,-1 - CF=Z*CFA+(K+1.0D0)*CFB - CPB(K)=CF - CFB=CFA + ELSE + IF (A0.LE.3.0) THEN + CALL CPDSA(-N0,Z,CFA) + CPB(N0)=CFA + N1=N0+1 + CALL CPDSA(-N1,Z,CFB) + CPB(N1)=CFB + NM1=N0-1 + DO 20 K=NM1,0,-1 + CF=Z*CFA+(K+1.0D0)*CFB + CPB(K)=CF + CFB=CFA 20 CFA=CF - ELSE - M=100+ABS(N) - CFA=C0 - CFB=(1.0D-30,0.0D0) - DO 25 K=M,0,-1 - CF=Z*CFB+(K+1.0D0)*CFA - IF (K.LE.N0) CPB(K)=CF - CFA=CFB + ELSE + M=100+ABS(N) + CFA=C0 + CFB=(1.0D-30,0.0D0) + DO 25 K=M,0,-1 + CF=Z*CFB+(K+1.0D0)*CFA + IF (K.LE.N0) CPB(K)=CF + CFA=CFB 25 CFB=CF - CS0=CA0/CF - DO 30 K=0,N0 + CS0=CA0/CF + DO 30 K=0,N0 30 CPB(K)=CS0*CPB(K) - ENDIF - ENDIF - ENDIF - CPD(0)=-0.5D0*Z*CPB(0) - IF (N.GE.0) THEN - DO 35 K=1,N + ENDIF + ENDIF + ENDIF + CPD(0)=-0.5D0*Z*CPB(0) + IF (N.GE.0) THEN + DO 35 K=1,N 35 CPD(K)=-0.5D0*Z*CPB(K)+K*CPB(K-1) - ELSE - DO 40 K=1,N0 + ELSE + DO 40 K=1,N0 40 CPD(K)=0.5D0*Z*CPB(K)-CPB(K-1) - ENDIF - RETURN - END + ENDIF + RETURN + END - + C ********************************** SUBROUTINE IK01B(X,BI0,DI0,BI1,DI1,BK0,DK0,BK1,DK1) @@ -8862,97 +8862,97 @@ C functions of the first kind C ======================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION CK(200),DF(200),SJ(0:251),DJ(0:251) - EPS=1.0D-14 - IP=1 - NM1=INT((N-M)/2) - IF (N-M.EQ.2*NM1) IP=0 - NM=25+NM1+INT(C) - REG=1.0D0 - IF (M+NM.GT.80) REG=1.0D-200 - R0=REG - DO 10 J=1,2*M+IP + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION CK(200),DF(200),SJ(0:251),DJ(0:251) + EPS=1.0D-14 + IP=1 + NM1=INT((N-M)/2) + IF (N-M.EQ.2*NM1) IP=0 + NM=25+NM1+INT(C) + REG=1.0D0 + IF (M+NM.GT.80) REG=1.0D-200 + R0=REG + DO 10 J=1,2*M+IP 10 R0=R0*J - R=R0 - SUC=R*DF(1) + R=R0 + SUC=R*DF(1) SW=0.0D0 - DO 15 K=2,NM - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - SUC=SUC+R*DF(K) - IF (K.GT.NM1.AND.DABS(SUC-SW).LT.DABS(SUC)*EPS) GO TO 20 + DO 15 K=2,NM + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + SUC=SUC+R*DF(K) + IF (K.GT.NM1.AND.DABS(SUC-SW).LT.DABS(SUC)*EPS) GO TO 20 15 SW=SUC 20 CONTINUE - IF (X.EQ.0.0) THEN - CALL SCKB(M,N,C,DF,CK) - SUM=0.0D0 + IF (X.EQ.0.0) THEN + CALL SCKB(M,N,C,DF,CK) + SUM=0.0D0 SW1=0.0D0 - DO 25 J=1,NM - SUM=SUM+CK(J) - IF (DABS(SUM-SW1).LT.DABS(SUM)*EPS) GO TO 30 + DO 25 J=1,NM + SUM=SUM+CK(J) + IF (DABS(SUM-SW1).LT.DABS(SUM)*EPS) GO TO 30 25 SW1=SUM 30 R1=1.0D0 - DO 35 J=1,(N+M+IP)/2 + 
DO 35 J=1,(N+M+IP)/2 35 R1=R1*(J+0.5D0*(N+M+IP)) - R2=1.0D0 - DO 40 J=1,M + R2=1.0D0 + DO 40 J=1,M 40 R2=2.0D0*C*R2*J - R3=1.0D0 - DO 45 J=1,(N-M-IP)/2 + R3=1.0D0 + DO 45 J=1,(N-M-IP)/2 45 R3=R3*J - SA0=(2.0*(M+IP)+1.0)*R1/(2.0**N*C**IP*R2*R3) - IF (IP.EQ.0) THEN - R1F=SUM/(SA0*SUC)*DF(1)*REG - R1D=0.0D0 - ELSE IF (IP.EQ.1) THEN - R1F=0.0D0 - R1D=SUM/(SA0*SUC)*DF(1)*REG - ENDIF - RETURN - ENDIF - CX=C*X - NM2=2*NM+M - CALL SPHJ(NM2,CX,NM2,SJ,DJ) - A0=(1.0D0-KD/(X*X))**(0.5D0*M)/SUC - R1F=0.0D0 + SA0=(2.0*(M+IP)+1.0)*R1/(2.0**N*C**IP*R2*R3) + IF (IP.EQ.0) THEN + R1F=SUM/(SA0*SUC)*DF(1)*REG + R1D=0.0D0 + ELSE IF (IP.EQ.1) THEN + R1F=0.0D0 + R1D=SUM/(SA0*SUC)*DF(1)*REG + ENDIF + RETURN + ENDIF + CX=C*X + NM2=2*NM+M + CALL SPHJ(NM2,CX,NM2,SJ,DJ) + A0=(1.0D0-KD/(X*X))**(0.5D0*M)/SUC + R1F=0.0D0 SW=0.0D0 LG=0 - DO 50 K=1,NM - L=2*K+M-N-2+IP - IF (L.EQ.4*INT(L/4)) LG=1 - IF (L.NE.4*INT(L/4)) LG=-1 - IF (K.EQ.1) THEN - R=R0 - ELSE - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - ENDIF - NP=M+2*K-2+IP - R1F=R1F+LG*R*DF(K)*SJ(NP) - IF (K.GT.NM1.AND.DABS(R1F-SW).LT.DABS(R1F)*EPS) GO TO 55 + DO 50 K=1,NM + L=2*K+M-N-2+IP + IF (L.EQ.4*INT(L/4)) LG=1 + IF (L.NE.4*INT(L/4)) LG=-1 + IF (K.EQ.1) THEN + R=R0 + ELSE + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + ENDIF + NP=M+2*K-2+IP + R1F=R1F+LG*R*DF(K)*SJ(NP) + IF (K.GT.NM1.AND.DABS(R1F-SW).LT.DABS(R1F)*EPS) GO TO 55 50 SW=R1F 55 R1F=R1F*A0 - B0=KD*M/X**3.0D0/(1.0-KD/(X*X))*R1F - SUD=0.0D0 + B0=KD*M/X**3.0D0/(1.0-KD/(X*X))*R1F + SUD=0.0D0 SW=0.0D0 - DO 60 K=1,NM - L=2*K+M-N-2+IP - IF (L.EQ.4*INT(L/4)) LG=1 - IF (L.NE.4*INT(L/4)) LG=-1 - IF (K.EQ.1) THEN - R=R0 - ELSE - R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) - ENDIF - NP=M+2*K-2+IP - SUD=SUD+LG*R*DF(K)*DJ(NP) - IF (K.GT.NM1.AND.DABS(SUD-SW).LT.DABS(SUD)*EPS) GO TO 65 + DO 60 K=1,NM + L=2*K+M-N-2+IP + IF (L.EQ.4*INT(L/4)) LG=1 + IF (L.NE.4*INT(L/4)) LG=-1 + IF (K.EQ.1) THEN + R=R0 + ELSE + R=R*(M+K-1.0)*(M+K+IP-1.5D0)/(K-1.0D0)/(K+IP-1.5D0) + ENDIF + NP=M+2*K-2+IP + SUD=SUD+LG*R*DF(K)*DJ(NP) + IF (K.GT.NM1.AND.DABS(SUD-SW).LT.DABS(SUD)*EPS) GO TO 65 60 SW=SUD 65 R1D=B0+A0*C*SUD - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE DVSA(VA,X,PD) @@ -9158,36 +9158,36 @@ C radial functions with a small argument C =========================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION BK(200) - EPS=1.0D-14 - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - NM=25+INT(0.5*(N-M)+C) - XM=(1.0D0+X*X)**(-0.5D0*M) - GF0=0.0D0 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION BK(200) + EPS=1.0D-14 + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + NM=25+INT(0.5*(N-M)+C) + XM=(1.0D0+X*X)**(-0.5D0*M) + GF0=0.0D0 GW=0.0D0 - DO 10 K=1,NM - GF0=GF0+BK(K)*X**(2.0*K-2.0) - IF (DABS((GF0-GW)/GF0).LT.EPS.AND.K.GE.10) GO TO 15 + DO 10 K=1,NM + GF0=GF0+BK(K)*X**(2.0*K-2.0) + IF (DABS((GF0-GW)/GF0).LT.EPS.AND.K.GE.10) GO TO 15 10 GW=GF0 15 GF=XM*GF0*X**(1-IP) - GD1=-M*X/(1.0D0+X*X)*GF - GD0=0.0D0 - DO 20 K=1,NM - IF (IP.EQ.0) THEN - GD0=GD0+(2.0D0*K-1.0)*BK(K)*X**(2.0*K-2.0) - ELSE - GD0=GD0+2.0D0*K*BK(K+1)*X**(2.0*K-1.0) - ENDIF - IF (DABS((GD0-GW)/GD0).LT.EPS.AND.K.GE.10) GO TO 25 + GD1=-M*X/(1.0D0+X*X)*GF + GD0=0.0D0 + DO 20 K=1,NM + IF (IP.EQ.0) THEN + GD0=GD0+(2.0D0*K-1.0)*BK(K)*X**(2.0*K-2.0) + ELSE + GD0=GD0+2.0D0*K*BK(K+1)*X**(2.0*K-1.0) + ENDIF + IF (DABS((GD0-GW)/GD0).LT.EPS.AND.K.GE.10) GO TO 25 20 GW=GD0 25 GD=GD1+XM*GD0 - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE ITJYA(X,TJ,TY) @@ -9920,51 +9920,51 @@ 
C kind C ============================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION BK(200),CK(200),DF(200),DN(200) - IF (DABS(DF(1)).LE.1.0D-280) THEN - R2F=1.0D+300 - R2D=1.0D+300 - RETURN - ENDIF - EPS=1.0D-14 - PI=3.141592653589793D0 - NM=25+INT((N-M)/2+C) - IP=1 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - CALL SCKB(M,N,C,DF,CK) - CALL KMN(M,N,C,CV,KD,DF,DN,CK1,CK2) - CALL QSTAR(M,N,C,CK,CK1,QS,QT) - CALL CBK(M,N,C,CV,QT,CK,BK) - IF (X.EQ.0.0D0) THEN - SUM=0.0D0 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION BK(200),CK(200),DF(200),DN(200) + IF (DABS(DF(1)).LE.1.0D-280) THEN + R2F=1.0D+300 + R2D=1.0D+300 + RETURN + ENDIF + EPS=1.0D-14 + PI=3.141592653589793D0 + NM=25+INT((N-M)/2+C) + IP=1 + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + CALL SCKB(M,N,C,DF,CK) + CALL KMN(M,N,C,CV,KD,DF,DN,CK1,CK2) + CALL QSTAR(M,N,C,CK,CK1,QS,QT) + CALL CBK(M,N,C,CV,QT,CK,BK) + IF (X.EQ.0.0D0) THEN + SUM=0.0D0 SW=0.0D0 - DO 10 J=1,NM - SUM=SUM+CK(J) - IF (DABS(SUM-SW).LT.DABS(SUM)*EPS) GO TO 15 + DO 10 J=1,NM + SUM=SUM+CK(J) + IF (DABS(SUM-SW).LT.DABS(SUM)*EPS) GO TO 15 10 SW=SUM 15 IF (IP.EQ.0) THEN - R1F=SUM/CK1 - R2F=-0.5D0*PI*QS*R1F - R2D=QS*R1F+BK(1) - ELSE IF (IP.EQ.1) THEN - R1D=SUM/CK1 - R2F=BK(1) - R2D=-0.5D0*PI*QS*R1D - ENDIF - RETURN - ELSE - CALL GMN(M,N,C,X,BK,GF,GD) - CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) - H0=DATAN(X)-0.5D0*PI - R2F=QS*R1F*H0+GF - R2D=QS*(R1D*H0+R1F/(1.0D0+X*X))+GD - ENDIF - RETURN - END + R1F=SUM/CK1 + R2F=-0.5D0*PI*QS*R1F + R2D=QS*R1F+BK(1) + ELSE IF (IP.EQ.1) THEN + R1D=SUM/CK1 + R2F=BK(1) + R2D=-0.5D0*PI*QS*R1D + ENDIF + RETURN + ELSE + CALL GMN(M,N,C,X,BK,GF,GD) + CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) + H0=DATAN(X)-0.5D0*PI + R2F=QS*R1F*H0+GF + R2D=QS*(R1D*H0+R1F/(1.0D0+X*X))+GD + ENDIF + RETURN + END - + C ********************************** SUBROUTINE CSPHIK(N,Z,NM,CSI,CDI,CSK,CDK) @@ -10106,55 +10106,55 @@ C point for backward recurrence C ======================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION SJ(0:N),DJ(0:N) - NM=N - IF (DABS(X).LT.1.0D-100) THEN - DO 10 K=0,N - SJ(K)=0.0D0 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION SJ(0:N),DJ(0:N) + NM=N + IF (DABS(X).LT.1.0D-100) THEN + DO 10 K=0,N + SJ(K)=0.0D0 10 DJ(K)=0.0D0 SJ(0)=1.0D0 IF (N.GT.0) THEN DJ(1)=.3333333333333333D0 ENDIF - RETURN - ENDIF - SJ(0)=DSIN(X)/X - DJ(0)=(DCOS(X)-DSIN(X)/X)/X + RETURN + ENDIF + SJ(0)=DSIN(X)/X + DJ(0)=(DCOS(X)-DSIN(X)/X)/X IF (N.LT.1) THEN RETURN ENDIF SJ(1)=(SJ(0)-DCOS(X))/X - IF (N.GE.2) THEN - SA=SJ(0) - SB=SJ(1) - M=MSTA1(X,200) - IF (M.LT.N) THEN - NM=M - ELSE - M=MSTA2(X,N,15) - ENDIF + IF (N.GE.2) THEN + SA=SJ(0) + SB=SJ(1) + M=MSTA1(X,200) + IF (M.LT.N) THEN + NM=M + ELSE + M=MSTA2(X,N,15) + ENDIF F=0.0D0 - F0=0.0D0 - F1=1.0D0-100 - DO 15 K=M,0,-1 - F=(2.0D0*K+3.0D0)*F1/X-F0 - IF (K.LE.NM) SJ(K)=F - F0=F1 + F0=0.0D0 + F1=1.0D0-100 + DO 15 K=M,0,-1 + F=(2.0D0*K+3.0D0)*F1/X-F0 + IF (K.LE.NM) SJ(K)=F + F0=F1 15 F1=F CS=0.0D0 - IF (DABS(SA).GT.DABS(SB)) CS=SA/F - IF (DABS(SA).LE.DABS(SB)) CS=SB/F0 - DO 20 K=0,NM + IF (DABS(SA).GT.DABS(SB)) CS=SA/F + IF (DABS(SA).LE.DABS(SB)) CS=SB/F0 + DO 20 K=0,NM 20 SJ(K)=CS*SJ(K) - ENDIF - DO 25 K=1,NM + ENDIF + DO 25 K=1,NM 25 DJ(K)=SJ(K-1)-(K+1.0D0)*SJ(K)/X - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE OTHPL(KF,N,X,PL,DPL) @@ -10311,27 +10311,27 @@ C the second kind for a small argument C ========================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION DF(200) - KD=-1 - CALL SDMN(M,N,C,CV,KD,DF) 
- IF (KF.NE.2) THEN - CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) - ENDIF - IF (KF.GT.1) THEN - ID=10 - IF (X.GT.1.0D-8) THEN - CALL RMN2L(M,N,C,X,DF,KD,R2F,R2D,ID) - ENDIF - IF (ID.GT.-1) THEN - CALL RMN2SO(M,N,C,X,CV,DF,KD,R2F,R2D) - ENDIF - ENDIF - RETURN - END + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION DF(200) + KD=-1 + CALL SDMN(M,N,C,CV,KD,DF) + IF (KF.NE.2) THEN + CALL RMN1(M,N,C,X,DF,KD,R1F,R1D) + ENDIF + IF (KF.GT.1) THEN + ID=10 + IF (X.GT.1.0D-8) THEN + CALL RMN2L(M,N,C,X,DF,KD,R2F,R2D,ID) + ENDIF + IF (ID.GT.-1) THEN + CALL RMN2SO(M,N,C,X,CV,DF,KD,R2F,R2D) + ENDIF + ENDIF + RETURN + END - + C ********************************** SUBROUTINE CH12N(N,Z,NM,CHF1,CHD1,CHF2,CHD2) @@ -10646,110 +10646,110 @@ C d3, ... for odd n-m C ===================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION A(200),D(200),G(200),DF(200) - NM=25+INT(0.5*(N-M)+C) - IF (C.LT.1.0D-10) THEN - DO 5 I=1,NM + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION A(200),D(200),G(200),DF(200) + NM=25+INT(0.5*(N-M)+C) + IF (C.LT.1.0D-10) THEN + DO 5 I=1,NM 5 DF(I)=0D0 - DF((N-M)/2+1)=1.0D0 - RETURN - ENDIF - CS=C*C*KD - IP=1 + DF((N-M)/2+1)=1.0D0 + RETURN + ENDIF + CS=C*C*KD + IP=1 K=0 - IF (N-M.EQ.2*INT((N-M)/2)) IP=0 - DO 10 I=1,NM+2 - IF (IP.EQ.0) K=2*(I-1) - IF (IP.EQ.1) K=2*I-1 - DK0=M+K - DK1=M+K+1 - DK2=2*(M+K) - D2K=2*M+K - A(I)=(D2K+2.0)*(D2K+1.0)/((DK2+3.0)*(DK2+5.0))*CS - D(I)=DK0*DK1+(2.0*DK0*DK1-2.0*M*M-1.0)/((DK2-1.0) + IF (N-M.EQ.2*INT((N-M)/2)) IP=0 + DO 10 I=1,NM+2 + IF (IP.EQ.0) K=2*(I-1) + IF (IP.EQ.1) K=2*I-1 + DK0=M+K + DK1=M+K+1 + DK2=2*(M+K) + D2K=2*M+K + A(I)=(D2K+2.0)*(D2K+1.0)/((DK2+3.0)*(DK2+5.0))*CS + D(I)=DK0*DK1+(2.0*DK0*DK1-2.0*M*M-1.0)/((DK2-1.0) & *(DK2+3.0))*CS - G(I)=K*(K-1.0)/((DK2-3.0)*(DK2-1.0))*CS + G(I)=K*(K-1.0)/((DK2-3.0)*(DK2-1.0))*CS 10 CONTINUE - FS=1.0D0 - F1=0.0D0 - F0=1.0D-100 - KB=0 - DF(NM+1)=0.0D0 + FS=1.0D0 + F1=0.0D0 + F0=1.0D-100 + KB=0 + DF(NM+1)=0.0D0 FL=0.0D0 - DO 30 K=NM,1,-1 - F=-((D(K+1)-CV)*F0+A(K+1)*F1)/G(K+1) - IF (DABS(F).GT.DABS(DF(K+1))) THEN - DF(K)=F - F1=F0 - F0=F - IF (DABS(F).GT.1.0D+100) THEN - DO 12 K1=K,NM + DO 30 K=NM,1,-1 + F=-((D(K+1)-CV)*F0+A(K+1)*F1)/G(K+1) + IF (DABS(F).GT.DABS(DF(K+1))) THEN + DF(K)=F + F1=F0 + F0=F + IF (DABS(F).GT.1.0D+100) THEN + DO 12 K1=K,NM 12 DF(K1)=DF(K1)*1.0D-100 - F1=F1*1.0D-100 - F0=F0*1.0D-100 - ENDIF - ELSE - KB=K - FL=DF(K+1) - F1=1.0D-100 - F2=-(D(1)-CV)/A(1)*F1 - DF(1)=F1 - IF (KB.EQ.1) THEN - FS=F2 - ELSE IF (KB.EQ.2) THEN - DF(2)=F2 - FS=-((D(2)-CV)*F2+G(2)*F1)/A(2) - ELSE - DF(2)=F2 - DO 20 J=3,KB+1 - F=-((D(J-1)-CV)*F2+G(J-1)*F1)/A(J-1) - IF (J.LE.KB) DF(J)=F - IF (DABS(F).GT.1.0D+100) THEN - DO 15 K1=1,J + F1=F1*1.0D-100 + F0=F0*1.0D-100 + ENDIF + ELSE + KB=K + FL=DF(K+1) + F1=1.0D-100 + F2=-(D(1)-CV)/A(1)*F1 + DF(1)=F1 + IF (KB.EQ.1) THEN + FS=F2 + ELSE IF (KB.EQ.2) THEN + DF(2)=F2 + FS=-((D(2)-CV)*F2+G(2)*F1)/A(2) + ELSE + DF(2)=F2 + DO 20 J=3,KB+1 + F=-((D(J-1)-CV)*F2+G(J-1)*F1)/A(J-1) + IF (J.LE.KB) DF(J)=F + IF (DABS(F).GT.1.0D+100) THEN + DO 15 K1=1,J 15 DF(K1)=DF(K1)*1.0D-100 - F=F*1.0D-100 - F2=F2*1.0D-100 - ENDIF - F1=F2 + F=F*1.0D-100 + F2=F2*1.0D-100 + ENDIF + F1=F2 20 F2=F - FS=F - ENDIF - GO TO 35 - ENDIF + FS=F + ENDIF + GO TO 35 + ENDIF 30 CONTINUE 35 SU1=0.0D0 - R1=1.0D0 - DO 40 J=M+IP+1,2*(M+IP) + R1=1.0D0 + DO 40 J=M+IP+1,2*(M+IP) 40 R1=R1*J - SU1=DF(1)*R1 - DO 45 K=2,KB - R1=-R1*(K+M+IP-1.5D0)/(K-1.0D0) + SU1=DF(1)*R1 + DO 45 K=2,KB + R1=-R1*(K+M+IP-1.5D0)/(K-1.0D0) 45 SU1=SU1+R1*DF(K) - SU2=0.0D0 + SU2=0.0D0 SW=0.0D0 - DO 50 K=KB+1,NM - IF 
(K.NE.1) R1=-R1*(K+M+IP-1.5D0)/(K-1.0D0) - SU2=SU2+R1*DF(K) - IF (DABS(SW-SU2).LT.DABS(SU2)*1.0D-14) GOTO 55 + DO 50 K=KB+1,NM + IF (K.NE.1) R1=-R1*(K+M+IP-1.5D0)/(K-1.0D0) + SU2=SU2+R1*DF(K) + IF (DABS(SW-SU2).LT.DABS(SU2)*1.0D-14) GOTO 55 50 SW=SU2 55 R3=1.0D0 - DO 60 J=1,(M+N+IP)/2 + DO 60 J=1,(M+N+IP)/2 60 R3=R3*(J+0.5D0*(N+M+IP)) - R4=1.0D0 - DO 65 J=1,(N-M-IP)/2 + R4=1.0D0 + DO 65 J=1,(N-M-IP)/2 65 R4=-4.0D0*R4*J - S0=R3/(FL*(SU1/FS)+SU2)/R4 - DO 70 K=1,KB + S0=R3/(FL*(SU1/FS)+SU2)/R4 + DO 70 K=1,KB 70 DF(K)=FL/FS*S0*DF(K) - DO 75 K=KB+1,NM + DO 75 K=KB+1,NM 75 DF(K)=S0*DF(K) - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE AJYIK(X,VJ1,VJ2,VY1,VY2,VI1,VI2,VK1,VK2) @@ -11731,21 +11731,21 @@ C Output: GA --- ?(x) C ===================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - PI=3.141592653589793D0 - IF (X.EQ.INT(X).AND.X.GT.0.0) THEN - GA=1.0D0 - M1=INT(X-1.0) - DO 10 K=2,M1 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + PI=3.141592653589793D0 + IF (X.EQ.INT(X).AND.X.GT.0.0) THEN + GA=1.0D0 + M1=INT(X-1.0) + DO 10 K=2,M1 10 GA=GA*K - ELSE IF (X+.5D0.EQ.INT(X+.5D0).AND.X.GT.0.0) THEN - M=INT(X) - GA=DSQRT(PI) - DO 15 K=1,M + ELSE IF (X+.5D0.EQ.INT(X+.5D0).AND.X.GT.0.0) THEN + M=INT(X) + GA=DSQRT(PI) + DO 15 K=1,M 15 GA=0.5D0*GA*(2.0D0*K-1.0D0) - ENDIF - RETURN - END + ENDIF + RETURN + END C ********************************** @@ -11986,87 +11986,87 @@ C ( L = n' - m + 1 ) C ========================================================= C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION B(100),H(100),D(300),E(300),F(300),CV0(100), + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION B(100),H(100),D(300),E(300),F(300),CV0(100), & A(300),G(300),EG(200) - IF (C.LT.1.0D-10) THEN - DO 5 I=1,N-M+1 + IF (C.LT.1.0D-10) THEN + DO 5 I=1,N-M+1 5 EG(I)=(I+M)*(I+M-1.0D0) - GO TO 70 - ENDIF - ICM=(N-M+2)/2 - NM=10+INT(0.5*(N-M)+C) - CS=C*C*KD + GO TO 70 + ENDIF + ICM=(N-M+2)/2 + NM=10+INT(0.5*(N-M)+C) + CS=C*C*KD K=0 - DO 60 L=0,1 - DO 10 I=1,NM - IF (L.EQ.0) K=2*(I-1) - IF (L.EQ.1) K=2*I-1 - DK0=M+K - DK1=M+K+1 - DK2=2*(M+K) - D2K=2*M+K - A(I)=(D2K+2.0)*(D2K+1.0)/((DK2+3.0)*(DK2+5.0))*CS - D(I)=DK0*DK1+(2.0*DK0*DK1-2.0*M*M-1.0)/((DK2-1.0) + DO 60 L=0,1 + DO 10 I=1,NM + IF (L.EQ.0) K=2*(I-1) + IF (L.EQ.1) K=2*I-1 + DK0=M+K + DK1=M+K+1 + DK2=2*(M+K) + D2K=2*M+K + A(I)=(D2K+2.0)*(D2K+1.0)/((DK2+3.0)*(DK2+5.0))*CS + D(I)=DK0*DK1+(2.0*DK0*DK1-2.0*M*M-1.0)/((DK2-1.0) & *(DK2+3.0))*CS 10 G(I)=K*(K-1.0)/((DK2-3.0)*(DK2-1.0))*CS - DO 15 K=2,NM - E(K)=DSQRT(A(K-1)*G(K)) + DO 15 K=2,NM + E(K)=DSQRT(A(K-1)*G(K)) 15 F(K)=E(K)*E(K) - F(1)=0.0D0 - E(1)=0.0D0 - XA=D(NM)+DABS(E(NM)) - XB=D(NM)-DABS(E(NM)) - NM1=NM-1 - DO 20 I=1,NM1 - T=DABS(E(I))+DABS(E(I+1)) - T1=D(I)+T - IF (XA.LT.T1) XA=T1 - T1=D(I)-T - IF (T1.LT.XB) XB=T1 + F(1)=0.0D0 + E(1)=0.0D0 + XA=D(NM)+DABS(E(NM)) + XB=D(NM)-DABS(E(NM)) + NM1=NM-1 + DO 20 I=1,NM1 + T=DABS(E(I))+DABS(E(I+1)) + T1=D(I)+T + IF (XA.LT.T1) XA=T1 + T1=D(I)-T + IF (T1.LT.XB) XB=T1 20 CONTINUE - DO 25 I=1,ICM - B(I)=XA + DO 25 I=1,ICM + B(I)=XA 25 H(I)=XB - DO 55 K=1,ICM - DO 30 K1=K,ICM - IF (B(K1).LT.B(K)) THEN - B(K)=B(K1) - GO TO 35 - ENDIF + DO 55 K=1,ICM + DO 30 K1=K,ICM + IF (B(K1).LT.B(K)) THEN + B(K)=B(K1) + GO TO 35 + ENDIF 30 CONTINUE 35 IF (K.NE.1.AND.H(K).LT.H(K-1)) H(K)=H(K-1) 40 X1=(B(K)+H(K))/2.0D0 - CV0(K)=X1 - IF (DABS((B(K)-H(K))/X1).LT.1.0D-14) GO TO 50 - J=0 - S=1.0D0 - DO 45 I=1,NM - IF (S.EQ.0.0D0) S=S+1.0D-30 - T=F(I)/S - S=D(I)-T-X1 - IF (S.LT.0.0D0) J=J+1 + CV0(K)=X1 + IF 
(DABS((B(K)-H(K))/X1).LT.1.0D-14) GO TO 50 + J=0 + S=1.0D0 + DO 45 I=1,NM + IF (S.EQ.0.0D0) S=S+1.0D-30 + T=F(I)/S + S=D(I)-T-X1 + IF (S.LT.0.0D0) J=J+1 45 CONTINUE - IF (J.LT.K) THEN - H(K)=X1 - ELSE - B(K)=X1 - IF (J.GE.ICM) THEN - B(ICM)=X1 - ELSE - IF (H(J+1).LT.X1) H(J+1)=X1 - IF (X1.LT.B(J)) B(J)=X1 - ENDIF - ENDIF - GO TO 40 + IF (J.LT.K) THEN + H(K)=X1 + ELSE + B(K)=X1 + IF (J.GE.ICM) THEN + B(ICM)=X1 + ELSE + IF (H(J+1).LT.X1) H(J+1)=X1 + IF (X1.LT.B(J)) B(J)=X1 + ENDIF + ENDIF + GO TO 40 50 CV0(K)=X1 - IF (L.EQ.0) EG(2*K-1)=CV0(K) - IF (L.EQ.1) EG(2*K)=CV0(K) + IF (L.EQ.0) EG(2*K-1)=CV0(K) + IF (L.EQ.1) EG(2*K)=CV0(K) 55 CONTINUE 60 CONTINUE 70 CV=EG(N-M+1) - RETURN - END + RETURN + END C ********************************** @@ -12283,101 +12283,101 @@ C derivatives C ============================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION FG(251),BJ1(0:251),DJ1(0:251),BJ2(0:251),DJ2(0:251), + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION FG(251),BJ1(0:251),DJ1(0:251),BJ2(0:251),DJ2(0:251), & BY1(0:251),DY1(0:251),BY2(0:251),DY2(0:251) - EPS=1.0D-14 - IF (KF.EQ.1.AND.M.EQ.2*INT(M/2)) KD=1 - IF (KF.EQ.1.AND.M.NE.2*INT(M/2)) KD=2 - IF (KF.EQ.2.AND.M.NE.2*INT(M/2)) KD=3 - IF (KF.EQ.2.AND.M.EQ.2*INT(M/2)) KD=4 - CALL CVA2(KD,M,Q,A) - IF (Q.LE.1.0D0) THEN - QM=7.5+56.1*SQRT(Q)-134.7*Q+90.7*SQRT(Q)*Q - ELSE - QM=17.0+3.1*SQRT(Q)-.126*Q+.0037*SQRT(Q)*Q - ENDIF - KM=INT(QM+0.5*M) - CALL FCOEF(KD,M,Q,A,FG) - IC=INT(M/2)+1 - IF (KD.EQ.4) IC=M/2 - C1=DEXP(-X) - C2=DEXP(X) - U1=DSQRT(Q)*C1 - U2=DSQRT(Q)*C2 - CALL JYNB(KM,U1,NM,BJ1,DJ1,BY1,DY1) - CALL JYNB(KM,U2,NM,BJ2,DJ2,BY2,DY2) + EPS=1.0D-14 + IF (KF.EQ.1.AND.M.EQ.2*INT(M/2)) KD=1 + IF (KF.EQ.1.AND.M.NE.2*INT(M/2)) KD=2 + IF (KF.EQ.2.AND.M.NE.2*INT(M/2)) KD=3 + IF (KF.EQ.2.AND.M.EQ.2*INT(M/2)) KD=4 + CALL CVA2(KD,M,Q,A) + IF (Q.LE.1.0D0) THEN + QM=7.5+56.1*SQRT(Q)-134.7*Q+90.7*SQRT(Q)*Q + ELSE + QM=17.0+3.1*SQRT(Q)-.126*Q+.0037*SQRT(Q)*Q + ENDIF + KM=INT(QM+0.5*M) + CALL FCOEF(KD,M,Q,A,FG) + IC=INT(M/2)+1 + IF (KD.EQ.4) IC=M/2 + C1=DEXP(-X) + C2=DEXP(X) + U1=DSQRT(Q)*C1 + U2=DSQRT(Q)*C2 + CALL JYNB(KM,U1,NM,BJ1,DJ1,BY1,DY1) + CALL JYNB(KM,U2,NM,BJ2,DJ2,BY2,DY2) W1=0.0D0 W2=0.0D0 - IF (KC.EQ.2) GO TO 50 - F1R=0.0D0 - DO 30 K=1,KM - IF (KD.EQ.1) THEN - F1R=F1R+(-1)**(IC+K)*FG(K)*BJ1(K-1)*BJ2(K-1) - ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN - F1R=F1R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BJ2(K) + IF (KC.EQ.2) GO TO 50 + F1R=0.0D0 + DO 30 K=1,KM + IF (KD.EQ.1) THEN + F1R=F1R+(-1)**(IC+K)*FG(K)*BJ1(K-1)*BJ2(K-1) + ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN + F1R=F1R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BJ2(K) & +(-1)**KD*BJ1(K)*BJ2(K-1)) - ELSE - F1R=F1R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BJ2(K+1) + ELSE + F1R=F1R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BJ2(K+1) & -BJ1(K+1)*BJ2(K-1)) - ENDIF - IF (K.GE.5.AND.DABS(F1R-W1).LT.DABS(F1R)*EPS) GO TO 35 + ENDIF + IF (K.GE.5.AND.DABS(F1R-W1).LT.DABS(F1R)*EPS) GO TO 35 30 W1=F1R 35 F1R=F1R/FG(1) - D1R=0.0D0 - DO 40 K=1,KM - IF (KD.EQ.1) THEN - D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*BJ1(K-1)*DJ2(K-1) + D1R=0.0D0 + DO 40 K=1,KM + IF (KD.EQ.1) THEN + D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*BJ1(K-1)*DJ2(K-1) & -C1*DJ1(K-1)*BJ2(K-1)) - ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN - D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DJ2(K) + ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN + D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DJ2(K) & +(-1)**KD*BJ1(K)*DJ2(K-1))-C1*(DJ1(K-1)*BJ2(K) & +(-1)**KD*DJ1(K)*BJ2(K-1))) - ELSE - D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DJ2(K+1) + ELSE + D1R=D1R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DJ2(K+1) & 
-BJ1(K+1)*DJ2(K-1))-C1*(DJ1(K-1)*BJ2(K+1) & -DJ1(K+1)*BJ2(K-1))) - ENDIF - IF (K.GE.5.AND.DABS(D1R-W2).LT.DABS(D1R)*EPS) GO TO 45 + ENDIF + IF (K.GE.5.AND.DABS(D1R-W2).LT.DABS(D1R)*EPS) GO TO 45 40 W2=D1R 45 D1R=D1R*DSQRT(Q)/FG(1) - IF (KC.EQ.1) RETURN + IF (KC.EQ.1) RETURN 50 F2R=0.0D0 - DO 55 K=1,KM - IF (KD.EQ.1) THEN - F2R=F2R+(-1)**(IC+K)*FG(K)*BJ1(K-1)*BY2(K-1) - ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN - F2R=F2R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BY2(K) + DO 55 K=1,KM + IF (KD.EQ.1) THEN + F2R=F2R+(-1)**(IC+K)*FG(K)*BJ1(K-1)*BY2(K-1) + ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN + F2R=F2R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BY2(K) & +(-1)**KD*BJ1(K)*BY2(K-1)) - ELSE - F2R=F2R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BY2(K+1) + ELSE + F2R=F2R+(-1)**(IC+K)*FG(K)*(BJ1(K-1)*BY2(K+1) & -BJ1(K+1)*BY2(K-1)) - ENDIF - IF (K.GE.5.AND.DABS(F2R-W1).LT.DABS(F2R)*EPS) GO TO 60 + ENDIF + IF (K.GE.5.AND.DABS(F2R-W1).LT.DABS(F2R)*EPS) GO TO 60 55 W1=F2R 60 F2R=F2R/FG(1) - D2R=0.0D0 - DO 65 K=1,KM - IF (KD.EQ.1) THEN - D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*BJ1(K-1)*DY2(K-1) + D2R=0.0D0 + DO 65 K=1,KM + IF (KD.EQ.1) THEN + D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*BJ1(K-1)*DY2(K-1) & -C1*DJ1(K-1)*BY2(K-1)) - ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN - D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DY2(K) + ELSE IF (KD.EQ.2.OR.KD.EQ.3) THEN + D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DY2(K) & +(-1)**KD*BJ1(K)*DY2(K-1))-C1*(DJ1(K-1)*BY2(K) & +(-1)**KD*DJ1(K)*BY2(K-1))) - ELSE - D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DY2(K+1) + ELSE + D2R=D2R+(-1)**(IC+K)*FG(K)*(C2*(BJ1(K-1)*DY2(K+1) & -BJ1(K+1)*DY2(K-1))-C1*(DJ1(K-1)*BY2(K+1) & -DJ1(K+1)*BY2(K-1))) - ENDIF - IF (K.GE.5.AND.DABS(D2R-W2).LT.DABS(D2R)*EPS) GO TO 70 + ENDIF + IF (K.GE.5.AND.DABS(D2R-W2).LT.DABS(D2R)*EPS) GO TO 70 65 W2=D2R 70 D2R=D2R*DSQRT(Q)/FG(1) - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE CIK01(Z,CBI0,CDI0,CBI1,CDI1,CBK0,CDK0,CBK1,CDK1) @@ -12582,37 +12582,37 @@ C NM --- Highest order computed C ====================================================== C - IMPLICIT DOUBLE PRECISION (A-H,O-Z) - DIMENSION SY(0:N),DY(0:N) - NM=N - IF (X.LT.1.0D-60) THEN - DO 10 K=0,N - SY(K)=-1.0D+300 + IMPLICIT DOUBLE PRECISION (A-H,O-Z) + DIMENSION SY(0:N),DY(0:N) + NM=N + IF (X.LT.1.0D-60) THEN + DO 10 K=0,N + SY(K)=-1.0D+300 10 DY(K)=1.0D+300 - RETURN - ENDIF - SY(0)=-DCOS(X)/X - F0=SY(0) - DY(0)=(DSIN(X)+DCOS(X)/X)/X + RETURN + ENDIF + SY(0)=-DCOS(X)/X + F0=SY(0) + DY(0)=(DSIN(X)+DCOS(X)/X)/X IF (N.LT.1) THEN RETURN ENDIF SY(1)=(SY(0)-DSIN(X))/X F1=SY(1) - DO 15 K=2,N - F=(2.0D0*K-1.0D0)*F1/X-F0 - SY(K)=F - IF (DABS(F).GE.1.0D+300) GO TO 20 - F0=F1 + DO 15 K=2,N + F=(2.0D0*K-1.0D0)*F1/X-F0 + SY(K)=F + IF (DABS(F).GE.1.0D+300) GO TO 20 + F0=F1 15 F1=F 20 NM=K-1 - DO 25 K=1,NM + DO 25 K=1,NM 25 DY(K)=SY(K-1)-(K+1.0D0)*SY(K)/X - RETURN - END + RETURN + END - + C ********************************** SUBROUTINE JELP(U,HK,ESN,ECN,EDN,EPH) @@ -12760,6 +12760,6 @@ END - + C ********************************** From scipy-svn at scipy.org Mon May 7 12:24:39 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 7 May 2007 11:24:39 -0500 (CDT) Subject: [Scipy-svn] r2972 - trunk/Lib/sandbox/numexpr Message-ID: <20070507162439.0AB2339C01A@new.scipy.org> Author: cookedm Date: 2007-05-07 11:24:37 -0500 (Mon, 07 May 2007) New Revision: 2972 Modified: trunk/Lib/sandbox/numexpr/interpreter.c Log: [numexpr] fix #407: Wrong assumption of char signedness in Numexpr Modified: trunk/Lib/sandbox/numexpr/interpreter.c =================================================================== --- 
trunk/Lib/sandbox/numexpr/interpreter.c 2007-05-07 16:19:10 UTC (rev 2971) +++ trunk/Lib/sandbox/numexpr/interpreter.c 2007-05-07 16:24:37 UTC (rev 2972) @@ -103,7 +103,8 @@ }; /* returns the sig of the nth op, '\0' if no more ops -1 on failure */ -static int op_signature(int op, int n) { +static int +op_signature(int op, int n) { switch (op) { case OP_NOOP: break; @@ -432,7 +433,8 @@ static char get_return_sig(PyObject* program) { - char last_opcode, sig; + int sig; + char last_opcode; int end = PyString_Size(program); do { end -= 4; @@ -440,8 +442,11 @@ } while ((last_opcode = PyString_AS_STRING(program)[end]) == OP_NOOP); sig = op_signature(last_opcode, 0); - if (sig <= 0) return 'X'; - return sig; + if (sig <= 0) { + return 'X'; + } else { + return (char)sig; + } } static int From scipy-svn at scipy.org Tue May 8 11:59:35 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 8 May 2007 10:59:35 -0500 (CDT) Subject: [Scipy-svn] r2973 - trunk/Lib/sandbox/maskedarray Message-ID: <20070508155935.D650D39C15A@new.scipy.org> Author: pierregm Date: 2007-05-08 10:59:31 -0500 (Tue, 08 May 2007) New Revision: 2973 Modified: trunk/Lib/sandbox/maskedarray/core.py Log: core: _extrema_operation.reduce force a ravel() on array if axis=None Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-07 16:24:37 UTC (rev 2972) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-08 15:59:31 UTC (rev 2973) @@ -1214,6 +1214,7 @@ self._hardmask = False def unshare_mask(self): + "Copies the mask and set the sharedmask flag to False." if self._sharedmask: self._mask = self._mask.copy() self._sharedmask = False @@ -2010,6 +2011,7 @@ kargs = { 'axis' : axis } else: kargs = {} + target = target.ravel() if m is nomask: t = self.ufunc.reduce(target, **kargs) @@ -2645,6 +2647,5 @@ assert_equal(y._mask, [True, True]) # if 1: - x = arange(10) - x[0] = masked - print dot(x,x) + x = arange(64).reshape(8,8) + z = maximum(x) From scipy-svn at scipy.org Wed May 9 13:08:35 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 12:08:35 -0500 (CDT) Subject: [Scipy-svn] r2974 - in trunk/Lib/sandbox/timeseries: . include Message-ID: <20070509170835.6BF0939C0BB@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 12:08:29 -0500 (Wed, 09 May 2007) New Revision: 2974 Added: trunk/Lib/sandbox/timeseries/include/ trunk/Lib/sandbox/timeseries/include/c_lib.h trunk/Lib/sandbox/timeseries/include/c_tdates.h trunk/Lib/sandbox/timeseries/include/c_tseries.h Log: adding files related to restructuring of the C code Added: trunk/Lib/sandbox/timeseries/include/c_lib.h =================================================================== --- trunk/Lib/sandbox/timeseries/include/c_lib.h 2007-05-08 15:59:31 UTC (rev 2973) +++ trunk/Lib/sandbox/timeseries/include/c_lib.h 2007-05-09 17:08:29 UTC (rev 2974) @@ -0,0 +1,30 @@ +#ifndef C_LIB_H +#define C_LIB_H + +#include +#include +#include "arrayobject.h" + +/* c_lib defines generic functions that aren't inherently time series/date +specific but are needed in various parts of the module. 
*/ + +#define INT_ERR_CODE -999 + +#define MEM_CHECK(item) if (item == NULL) { return PyErr_NoMemory(); } +#define ERR_CHECK(item) if (item == NULL) { return NULL; } + +char *str_uppercase(char *); +char *str_replace(const char*, const char*, const char*); + +PyObject *np_add(PyObject*, PyObject*); +PyObject *np_multiply(PyObject*, PyObject*); +PyObject *np_subtract(PyObject*, PyObject*); +PyObject *np_sqrt(PyObject*); +int np_greater(PyObject*, PyObject*); +int np_greater_equal(PyObject*, PyObject*); + +PyObject *set_callback(PyObject*, PyObject**); + +void import_c_lib(PyObject*); + +#endif Added: trunk/Lib/sandbox/timeseries/include/c_tdates.h =================================================================== --- trunk/Lib/sandbox/timeseries/include/c_tdates.h 2007-05-08 15:59:31 UTC (rev 2973) +++ trunk/Lib/sandbox/timeseries/include/c_tdates.h 2007-05-09 17:08:29 UTC (rev 2974) @@ -0,0 +1,129 @@ +#ifndef C_TDATES_H +#define C_TDATES_H + +#include "c_lib.h" + +#define HIGHFREQ_ORIG 719163 + +/*** FREQUENCY CONSTANTS ***/ + +#define FR_ANN 1000 /* Annual */ +#define FR_ANNDEC FR_ANN /* Annual - December year end*/ +#define FR_ANNJAN 1001 /* Annual - January year end*/ +#define FR_ANNFEB 1002 /* Annual - February year end*/ +#define FR_ANNMAR 1003 /* Annual - March year end*/ +#define FR_ANNAPR 1004 /* Annual - April year end*/ +#define FR_ANNMAY 1005 /* Annual - May year end*/ +#define FR_ANNJUN 1006 /* Annual - June year end*/ +#define FR_ANNJUL 1007 /* Annual - July year end*/ +#define FR_ANNAUG 1008 /* Annual - August year end*/ +#define FR_ANNSEP 1009 /* Annual - September year end*/ +#define FR_ANNOCT 1010 /* Annual - October year end*/ +#define FR_ANNNOV 1011 /* Annual - November year end*/ + +/* The standard quarterly frequencies. Year is determined by what year the end + month lies in. */ +#define FR_QTR 2000 /* Quarterly - December year end (default quarterly) */ +#define FR_QTRDEC FR_QTR /* Quarterly - December year end */ +#define FR_QTRJAN 2001 /* Quarterly - January year end */ +#define FR_QTRFEB 2002 /* Quarterly - February year end */ +#define FR_QTRMAR 2003 /* Quarterly - March year end */ +#define FR_QTRAPR 2004 /* Quarterly - April year end */ +#define FR_QTRMAY 2005 /* Quarterly - May year end */ +#define FR_QTRJUN 2006 /* Quarterly - June year end */ +#define FR_QTRJUL 2007 /* Quarterly - July year end */ +#define FR_QTRAUG 2008 /* Quarterly - August year end */ +#define FR_QTRSEP 2009 /* Quarterly - September year end */ +#define FR_QTROCT 2010 /* Quarterly - October year end */ +#define FR_QTRNOV 2011 /* Quarterly - November year end */ + +/* End period based quarterly frequencies. Year is determined by what year the + end month lies in. */ +#define FR_QTREDEC FR_QTRDEC /* Quarterly - December year end*/ +#define FR_QTREJAN FR_QTRJAN /* Quarterly - January year end*/ +#define FR_QTREFEB FR_QTRFEB /* Quarterly - February year end*/ +#define FR_QTREMAR FR_QTRMAR /* Quarterly - March year end*/ +#define FR_QTREAPR FR_QTRAPR /* Quarterly - April year end*/ +#define FR_QTREMAY FR_QTRMAY /* Quarterly - May year end*/ +#define FR_QTREJUN FR_QTRJUN /* Quarterly - June year end*/ +#define FR_QTREJUL FR_QTRJUL /* Quarterly - July year end*/ +#define FR_QTREAUG FR_QTRAUG /* Quarterly - August year end*/ +#define FR_QTRESEP FR_QTRSEP /* Quarterly - September year end*/ +#define FR_QTREOCT FR_QTROCT /* Quarterly - October year end*/ +#define FR_QTRENOV FR_QTRNOV /* Quarterly - November year end*/ + +/* Starting period based quarterly frequencies. 
Year is determined by what year + the starting month lies in. */ +#define FR_QTRSDEC FR_QTRDEC+12 /* Quarterly - December year end*/ +#define FR_QTRSJAN FR_QTRJAN+12 /* Quarterly - January year end*/ +#define FR_QTRSFEB FR_QTRFEB+12 /* Quarterly - February year end*/ +#define FR_QTRSMAR FR_QTRMAR+12 /* Quarterly - March year end*/ +#define FR_QTRSAPR FR_QTRAPR+12 /* Quarterly - April year end*/ +#define FR_QTRSMAY FR_QTRMAY+12 /* Quarterly - May year end*/ +#define FR_QTRSJUN FR_QTRJUN+12 /* Quarterly - June year end*/ +#define FR_QTRSJUL FR_QTRJUL+12 /* Quarterly - July year end*/ +#define FR_QTRSAUG FR_QTRAUG+12 /* Quarterly - August year end*/ +#define FR_QTRSSEP FR_QTRSEP+12 /* Quarterly - September year end*/ +#define FR_QTRSOCT FR_QTROCT+12 /* Quarterly - October year end*/ +#define FR_QTRSNOV FR_QTRNOV+12 /* Quarterly - November year end*/ + +#define FR_MTH 3000 /* Monthly */ + +#define FR_WK 4000 /* Weekly */ +#define FR_WKSUN FR_WK /* Weekly - Sunday end of week */ +#define FR_WKMON 4001 /* Weekly - Monday end of week */ +#define FR_WKTUE 4002 /* Weekly - Tuesday end of week */ +#define FR_WKWED 4003 /* Weekly - Wednesday end of week */ +#define FR_WKTHU 4004 /* Weekly - Thursday end of week */ +#define FR_WKFRI 4005 /* Weekly - Friday end of week */ +#define FR_WKSAT 4006 /* Weekly - Saturday end of week */ + +#define FR_BUS 5000 /* Business days */ +#define FR_DAY 6000 /* Daily */ +#define FR_HR 7000 /* Hourly */ +#define FR_MIN 8000 /* Minutely */ +#define FR_SEC 9000 /* Secondly */ +#define FR_UND -10000 /* Undefined */ + +//////////////////////////////////////////////////// + +int get_freq_group(int); + +typedef struct { + int from_week_end; //day the week ends on in the "from" frequency + int to_week_end; //day the week ends on in the "to" frequency + + int from_a_year_end; //month the year ends on in the "from" frequency + int to_a_year_end; //month the year ends on in the "to" frequency + + int from_q_year_end; //month the year ends on in the "from" frequency + int to_q_year_end; //month the year ends on in the "to" frequency +} asfreq_info; + +int check_freq(PyObject *); +void get_asfreq_info(int, int, asfreq_info*); +long (*get_asfreq_func(int, int, int))(long, char, asfreq_info*); + +#define CHECK_ASFREQ(result) if ((result) == INT_ERR_CODE) return NULL + +PyObject *DateArray_asfreq(PyObject *, PyObject *); +PyObject *DateArray_getDateInfo(PyObject *, PyObject *); + +char c_tdates_thisday_doc[]; +PyObject *c_tdates_thisday(PyObject *, PyObject *); + +char c_tdates_check_freq_doc[]; +PyObject *c_tdates_check_freq(PyObject *, PyObject *); + +char c_tdates_check_freq_str_doc[]; +PyObject *c_tdates_check_freq_str(PyObject *, PyObject *); + +char c_tdates_get_freq_group_doc[]; +PyObject *c_tdates_get_freq_group(PyObject *, PyObject *); + +PyObject *set_callback_DateFromString(PyObject *, PyObject *); +PyObject *set_callback_DateTimeFromString(PyObject *, PyObject *); + +void import_c_tdates(PyObject *); + +#endif Added: trunk/Lib/sandbox/timeseries/include/c_tseries.h =================================================================== --- trunk/Lib/sandbox/timeseries/include/c_tseries.h 2007-05-08 15:59:31 UTC (rev 2973) +++ trunk/Lib/sandbox/timeseries/include/c_tseries.h 2007-05-09 17:08:29 UTC (rev 2974) @@ -0,0 +1,15 @@ +#ifndef C_TSERIES_H +#define C_TSERIES_H + +#include "c_lib.h" + +PyObject *TimeSeries_convert(PyObject *, PyObject *); + +PyObject *MaskedArray_mov_sum(PyObject *, PyObject *, PyObject *); +PyObject *MaskedArray_mov_median(PyObject *, PyObject *, 
PyObject *); +PyObject *MaskedArray_mov_average(PyObject *, PyObject *, PyObject *); +PyObject *MaskedArray_mov_stddev(PyObject *, PyObject *, PyObject *); + +void import_c_tseries(PyObject *); + +#endif From scipy-svn at scipy.org Wed May 9 13:09:42 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 12:09:42 -0500 (CDT) Subject: [Scipy-svn] r2975 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070509170942.671DE39C0BB@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 12:09:36 -0500 (Wed, 09 May 2007) New Revision: 2975 Added: trunk/Lib/sandbox/timeseries/src/c_lib.c trunk/Lib/sandbox/timeseries/src/c_tdates.c trunk/Lib/sandbox/timeseries/src/c_tseries.c Log: new files related to restructuring of C code Added: trunk/Lib/sandbox/timeseries/src/c_lib.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_lib.c 2007-05-09 17:08:29 UTC (rev 2974) +++ trunk/Lib/sandbox/timeseries/src/c_lib.c 2007-05-09 17:09:36 UTC (rev 2975) @@ -0,0 +1,204 @@ +#include "c_lib.h" +#include "arrayobject.h" + +// Numpy UFUNCS +static PyObject *NP_ADD, *NP_MULTIPLY, *NP_SUBTRACT, *NP_SQRT, + *NP_GREATER, *NP_GREATER_EQUAL; + +/********************************************************* +** Convenience wrappers for numpy UFUNCS ** +*********************************************************/ +PyObject* +np_add(PyObject *left_val, PyObject *right_val) { + + PyObject *result; + + result = PyObject_CallFunction( + NP_ADD, "OO", + (PyArrayObject*)left_val, + right_val); + return result; +} + +PyObject* +np_subtract(PyObject *left_val, PyObject *right_val) { + + PyObject *result; + + result = PyObject_CallFunction( + NP_SUBTRACT, "OO", + (PyArrayObject*)left_val, + right_val); + return result; +} + +PyObject* +np_multiply(PyObject *left_val, PyObject *right_val) { + + PyObject *result; + + result = PyObject_CallFunction( + NP_MULTIPLY, "OO", + (PyArrayObject*)left_val, + right_val); + return result; +} + +PyObject* +np_sqrt(PyObject *val) { + return PyObject_CallFunction(NP_SQRT, "(O)", val); +} + +int np_greater(PyObject *left_val, PyObject *right_val) { + + PyObject *temp; + int result; + + temp = PyObject_CallFunction( + NP_GREATER, "OO", + (PyArrayObject*)left_val, + right_val); + + result = (int)PyInt_AsLong(temp); + Py_DECREF(temp); + return result; +} + +int np_greater_equal(PyObject *left_val, PyObject *right_val) { + + PyObject *temp; + int result; + + temp = PyObject_CallFunction( + NP_GREATER_EQUAL, "OO", + (PyArrayObject*)left_val, + right_val); + + result = (int)PyInt_AsLong(temp); + Py_DECREF(temp); + return result; +} + +char *str_uppercase(char *str) { + if (str) { + int i, len=strlen(str); + char *result; + if((result = malloc((len + 1)*sizeof(char))) == NULL) { + return (char *)PyErr_NoMemory(); + } + strcpy(result, str); + + for (i=0;i +#include + + +int get_freq_group(int freq) { return (freq/1000)*1000; } + +static asfreq_info NULL_AF_INFO; + +/********************************************************* +** Python callbacks. 
These functions must be called by ** +** the module __init__ script ** +*********************************************************/ + +static PyObject *DateFromString = NULL; +PyObject * +set_callback_DateFromString(PyObject *dummy, PyObject *args) { + return set_callback(args, &DateFromString); +} + +static PyObject *DateTimeFromString = NULL; +PyObject * +set_callback_DateTimeFromString(PyObject *dummy, PyObject *args) { + return set_callback(args, &DateTimeFromString); +} + +//DERIVED FROM mx.DateTime +/* +===================================================== +== Functions in the following section are borrowed == +== from mx.DateTime, and in many cases slightly == +== modified == +===================================================== +*/ + +#define Py_AssertWithArg(x,errortype,errorstr,a1) {if (!(x)) {PyErr_Format(errortype,errorstr,a1);goto onError;}} +#define Py_Error(errortype,errorstr) {PyErr_SetString(errortype,errorstr);goto onError;} + + /* Error Exception objects */ +static PyObject *DateCalc_Error; +static PyObject *DateCalc_RangeError; + +#define GREGORIAN_CALENDAR 0 +#define JULIAN_CALENDAR 1 + +#define SECONDS_PER_DAY ((double) 86400.0) + +/* Table with day offsets for each month (0-based, without and with leap) */ +static int month_offset[2][13] = { + { 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 }, + { 0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366 } +}; + +/* Table of number of days in a month (0-based, without and with leap) */ +static int days_in_month[2][12] = { + { 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 }, + { 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 } +}; + +struct date_info { + long absdate; + double abstime; + + double second; + int minute; + int hour; + int day; + int month; + int quarter; + int year; + int day_of_week; + int day_of_year; + int calendar; +}; + + +/* Return 1/0 iff year points to a leap year in calendar. */ +static +int dInfoCalc_Leapyear(register long year, + int calendar) +{ + if (calendar == GREGORIAN_CALENDAR) { + return (year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0)); + } else { + return (year % 4 == 0); + } +} + +static +int dInfoCalc_ISOWeek(struct date_info *dinfo) +{ + int week; + + /* Estimate */ + week = (dinfo->day_of_year-1) - dinfo->day_of_week + 3; + if (week >= 0) week = week / 7 + 1; + + /* Verify */ + if (week < 0) { + /* The day lies in last week of the previous year */ + if ((week > -2) || + (week == -2 && dInfoCalc_Leapyear(dinfo->year-1, dinfo->calendar))) + week = 53; + else + week = 52; + } else if (week == 53) { + /* Check if the week belongs to year or year+1 */ + if (31-dinfo->day + dinfo->day_of_week < 3) { + week = 1; + } + } + + return week; +} + + +/* Return the day of the week for the given absolute date. */ +static +int dInfoCalc_DayOfWeek(register long absdate) +{ + int day_of_week; + + if (absdate >= 1) { + day_of_week = (absdate - 1) % 7; + } else { + day_of_week = 6 - ((-absdate) % 7); + } + return day_of_week; +} + +/* Return the year offset, that is the absolute date of the day + 31.12.(year-1) in the given calendar. + + Note: + For the Julian calendar we shift the absdate (which is measured + using the Gregorian Epoch) value by two days because the Epoch + (0001-01-01) in the Julian calendar lies 2 days before the Epoch in + the Gregorian calendar. 
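A minimal sketch checking the two helpers defined just above against well-known dates; the local copies below reuse the same expressions as dInfoCalc_Leapyear and dInfoCalc_DayOfWeek, whose convention is 0 == Monday.

#include <stdio.h>

/* Same rule as dInfoCalc_Leapyear for the Gregorian calendar. */
static int is_gregorian_leap(long year) {
    return (year % 4 == 0) && ((year % 100 != 0) || (year % 400 == 0));
}

/* Same rule as dInfoCalc_DayOfWeek: 0 == Monday ... 6 == Sunday. */
static int day_of_week(long absdate) {
    if (absdate >= 1)
        return (int)((absdate - 1) % 7);
    return (int)(6 - ((-absdate) % 7));
}

int main(void) {
    printf("%d %d\n", is_gregorian_leap(2000), is_gregorian_leap(1900)); /* 1 0 */
    printf("%d\n", day_of_week(719163));  /* 3, i.e. Thursday: 1 Jan 1970 */
    return 0;
}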
*/ +static +int dInfoCalc_YearOffset(register long year, + int calendar) +{ + year--; + if (calendar == GREGORIAN_CALENDAR) { + if (year >= 0 || -1/4 == -1) + return year*365 + year/4 - year/100 + year/400; + else + return year*365 + (year-3)/4 - (year-99)/100 + (year-399)/400; + } + else if (calendar == JULIAN_CALENDAR) { + if (year >= 0 || -1/4 == -1) + return year*365 + year/4 - 2; + else + return year*365 + (year-3)/4 - 2; + } + Py_Error(DateCalc_Error, "unknown calendar"); + onError: + return -1; +} + + +/* Set the instance's value using the given date and time. calendar + may be set to the flags: GREGORIAN_CALENDAR, + JULIAN_CALENDAR to indicate the calendar to be used. */ + +static +int dInfoCalc_SetFromDateAndTime(struct date_info *dinfo, + int year, + int month, + int day, + int hour, + int minute, + double second, + int calendar) +{ + + /* Calculate the absolute date */ + { + int leap; + long yearoffset,absdate; + + /* Range check */ + Py_AssertWithArg(year > -(INT_MAX / 366) && year < (INT_MAX / 366), + DateCalc_RangeError, + "year out of range: %i", + year); + + /* Is it a leap year ? */ + leap = dInfoCalc_Leapyear(year,calendar); + + /* Negative month values indicate months relative to the years end */ + if (month < 0) month += 13; + Py_AssertWithArg(month >= 1 && month <= 12, + DateCalc_RangeError, + "month out of range (1-12): %i", + month); + + /* Negative values indicate days relative to the months end */ + if (day < 0) day += days_in_month[leap][month - 1] + 1; + Py_AssertWithArg(day >= 1 && day <= days_in_month[leap][month - 1], + DateCalc_RangeError, + "day out of range: %i", + day); + + yearoffset = dInfoCalc_YearOffset(year,calendar); + if (PyErr_Occurred()) goto onError; + + absdate = day + month_offset[leap][month - 1] + yearoffset; + + dinfo->absdate = absdate; + + dinfo->year = year; + dinfo->month = month; + dinfo->quarter = ((month-1)/3)+1; + dinfo->day = day; + + dinfo->day_of_week = dInfoCalc_DayOfWeek(absdate); + dinfo->day_of_year = (short)(absdate - yearoffset); + + dinfo->calendar = calendar; + } + + /* Calculate the absolute time */ + { + Py_AssertWithArg(hour >= 0 && hour <= 23, + DateCalc_RangeError, + "hour out of range (0-23): %i", + hour); + Py_AssertWithArg(minute >= 0 && minute <= 59, + DateCalc_RangeError, + "minute out of range (0-59): %i", + minute); + Py_AssertWithArg(second >= (double)0.0 && + (second < (double)60.0 || + (hour == 23 && minute == 59 && + second < (double)61.0)), + DateCalc_RangeError, + "second out of range (0.0 - <60.0; <61.0 for 23:59): %f", + second); + + dinfo->abstime = (double)(hour*3600 + minute*60) + second; + + dinfo->hour = hour; + dinfo->minute = minute; + dinfo->second = second; + } + return 0; + onError: + return -1; +} + +static int monthToQuarter(int month) { return ((month-1)/3)+1; } + +/* Sets the date part of the date_info struct using the indicated + calendar. + + XXX This could also be done using some integer arithmetics rather + than with this iterative approach... 
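A minimal sketch of the forward mapping used by dInfoCalc_SetFromDateAndTime, absdate = day + month_offset[leap][month-1] + yearoffset, checked against two known values; the table row and the offset sum are copied from the code above, restricted to non-leap Januaries and non-negative Gregorian years.

#include <stdio.h>

/* First row of month_offset (non-leap years), copied from above. */
static const int month_offset_noleap[13] =
    { 0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365 };

/* Gregorian branch of dInfoCalc_YearOffset for non-negative years. */
static long gregorian_year_offset(long year) {
    year--;
    return year * 365 + year / 4 - year / 100 + year / 400;
}

int main(void) {
    /* 1 January 0001 is absolute day 1. */
    printf("%ld\n", 1L + month_offset_noleap[0] + gregorian_year_offset(1));
    /* 1 January 1970 is absolute day 719163, the HIGHFREQ_ORIG constant in c_tdates.h. */
    printf("%ld\n", 1L + month_offset_noleap[0] + gregorian_year_offset(1970));
    return 0;
}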
*/ +static +int dInfoCalc_SetFromAbsDate(register struct date_info *dinfo, + long absdate, + int calendar) +{ + register long year; + long yearoffset; + int leap,dayoffset; + int *monthoffset; + + /* Approximate year */ + if (calendar == GREGORIAN_CALENDAR) { + year = (long)(((double)absdate) / 365.2425); + } else if (calendar == JULIAN_CALENDAR) { + year = (long)(((double)absdate) / 365.25); + } else { + Py_Error(DateCalc_Error, "unknown calendar"); + } + if (absdate > 0) year++; + + /* Apply corrections to reach the correct year */ + while (1) { + /* Calculate the year offset */ + yearoffset = dInfoCalc_YearOffset(year,calendar); + if (PyErr_Occurred()) + goto onError; + + /* Backward correction: absdate must be greater than the + yearoffset */ + if (yearoffset >= absdate) { + year--; + continue; + } + + dayoffset = absdate - yearoffset; + leap = dInfoCalc_Leapyear(year,calendar); + + /* Forward correction: non leap years only have 365 days */ + if (dayoffset > 365 && !leap) { + year++; + continue; + } + break; + } + + dinfo->year = year; + dinfo->calendar = calendar; + + /* Now iterate to find the month */ + monthoffset = month_offset[leap]; + { + register int month; + + for (month = 1; month < 13; month++) { + if (monthoffset[month] >= dayoffset) + break; + } + + dinfo->month = month; + dinfo->quarter = monthToQuarter(month); + dinfo->day = dayoffset - month_offset[leap][month-1]; + } + + + dinfo->day_of_week = dInfoCalc_DayOfWeek(absdate); + dinfo->day_of_year = dayoffset; + dinfo->absdate = absdate; + + return 0; + + onError: + return -1; +} + +/* Sets the time part of the DateTime object. */ +static +int dInfoCalc_SetFromAbsTime(struct date_info *dinfo, + double abstime) +{ + int inttime; + int hour,minute; + double second; + + inttime = (int)abstime; + hour = inttime / 3600; + minute = (inttime % 3600) / 60; + second = abstime - (double)(hour*3600 + minute*60); + + dinfo->hour = hour; + dinfo->minute = minute; + dinfo->second = second; + + dinfo->abstime = abstime; + + return 0; +} + +/* Set the instance's value using the given date and time. calendar + may be set to the flags: GREGORIAN_CALENDAR, JULIAN_CALENDAR to + indicate the calendar to be used. 
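A minimal sketch of the split performed by dInfoCalc_SetFromAbsTime just above, where abstime is seconds since midnight in the range 0.0 to 86400.0; the arithmetic is the same, only the struct has been dropped and the input value is an arbitrary example.

#include <stdio.h>

int main(void) {
    double abstime = 45296.5;               /* 12:34:56.5 */
    int inttime = (int)abstime;
    int hour    = inttime / 3600;
    int minute  = (inttime % 3600) / 60;
    double second = abstime - (double)(hour * 3600 + minute * 60);

    printf("%02d:%02d:%04.1f\n", hour, minute, second);   /* prints 12:34:56.5 */
    return 0;
}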
*/ +static +int dInfoCalc_SetFromAbsDateTime(struct date_info *dinfo, + long absdate, + double abstime, + int calendar) +{ + + /* Bounds check */ + Py_AssertWithArg(abstime >= 0.0 && abstime <= SECONDS_PER_DAY, + DateCalc_Error, + "abstime out of range (0.0 - 86400.0): %f", + abstime); + + /* Calculate the date */ + if (dInfoCalc_SetFromAbsDate(dinfo, + absdate, + calendar)) + goto onError; + + /* Calculate the time */ + if (dInfoCalc_SetFromAbsTime(dinfo, + abstime)) + goto onError; + + return 0; + onError: + return -1; +} + +/* +==================================================== +== End of section borrowed from mx.DateTime == +==================================================== +*/ + + + + + +/////////////////////////////////////////////////////////////////////// + +// helpers for frequency conversion routines // + +static long DtoB_weekday(long fromDate) { return (((fromDate) / 7) * 5) + (fromDate)%7; } + +static long DtoB_WeekendToMonday(long absdate, int day_of_week) { + + if (day_of_week > 4) { + //change to Monday after weekend + absdate += (7 - day_of_week); + } + return DtoB_weekday(absdate); +} + +static long DtoB_WeekendToFriday(long absdate, int day_of_week) { + + if (day_of_week > 4) { + //change to friday before weekend + absdate -= (day_of_week - 4); + } + return DtoB_weekday(absdate); +} + +static long absdate_from_ymd(int y, int m, int d) { + struct date_info tempDate; + if (dInfoCalc_SetFromDateAndTime(&tempDate, y, m, d, 0, 0, 0, GREGORIAN_CALENDAR)) return INT_ERR_CODE; + return tempDate.absdate; +} + + +/////////////////////////////////////////////// + +// frequency specifc conversion routines +// each function must take an integer fromDate and a char relation ('B' or 'A' for 'BEFORE' or 'AFTER') + +//************ FROM DAILY *************** + +static long asfreq_DtoA(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + if (dinfo.month > af_info->to_a_year_end) { return (long)(dinfo.year + 1); } + else { return (long)(dinfo.year); } +} + +static long DtoQ_yq(long fromDate, asfreq_info *af_info, + int *year, int *quarter) { + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + if (af_info->to_q_year_end != 12) { + dinfo.month -= af_info->to_q_year_end; + if (dinfo.month <= 0) { dinfo.month += 12; } + else { dinfo.year += 1; } + dinfo.quarter = monthToQuarter(dinfo.month); + } + + *year = dinfo.year; + *quarter = dinfo.quarter; + + return 0; +} + + +static long asfreq_DtoQ(long fromDate, char relation, asfreq_info *af_info) { + + int year, quarter; + + if (DtoQ_yq(fromDate, af_info, &year, &quarter) == INT_ERR_CODE) + { return INT_ERR_CODE; } + + return (long)((year - 1) * 4 + quarter); +} + +static long asfreq_DtoM(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + return (long)((dinfo.year - 1) * 12 + dinfo.month); +} + +static long asfreq_DtoW(long fromDate, char relation, asfreq_info *af_info) { + return (fromDate - (1 + af_info->to_week_end))/7 + 1; +} + +static long asfreq_DtoB(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (relation == 'B') { + return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); + } else { + 
return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); + } +} + +static long asfreq_DtoB_forConvert(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (dinfo.day_of_week > 4) { + return -1; + } else { + return DtoB_weekday(fromDate); + } +} + +// needed for getDateInfo function +static long asfreq_DtoD(long fromDate, char relation, asfreq_info *af_info) { return fromDate; } + +static long asfreq_DtoHIGHFREQ(long fromDate, char relation, long periodsPerDay) { + if (fromDate >= HIGHFREQ_ORIG) { + if (relation == 'B') { return (fromDate - HIGHFREQ_ORIG)*(periodsPerDay) + 1; } + else { return (fromDate - HIGHFREQ_ORIG + 1)*(periodsPerDay); } + } else { return -1; } +} + +static long asfreq_DtoH(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoHIGHFREQ(fromDate, relation, 24); } +static long asfreq_DtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoHIGHFREQ(fromDate, relation, 24*60); } +static long asfreq_DtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoHIGHFREQ(fromDate, relation, 24*60*60); } + +//************ FROM SECONDLY *************** + +static long asfreq_StoD(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/(60*60*24) + HIGHFREQ_ORIG; } + +static long asfreq_StoA(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoA(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_StoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_StoM(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoM(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_StoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_StoB(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_StoB_forConvert(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB_forConvert(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_StoT(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/60 + 1; } +static long asfreq_StoH(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/(60*60) + 1; } + +//************ FROM MINUTELY *************** + +static long asfreq_TtoD(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/(60*24) + HIGHFREQ_ORIG; } + +static long asfreq_TtoA(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoA(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_TtoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_TtoM(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoM(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_TtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), 
relation, af_info); } +static long asfreq_TtoB(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static long asfreq_TtoB_forConvert(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB_forConvert(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static long asfreq_TtoH(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/60 + 1; } +static long asfreq_TtoS(long fromDate, char relation, asfreq_info *af_info) { + if (relation == 'B') { return fromDate*60 - 59; } + else { return fromDate*60; }} + +//************ FROM HOURLY *************** + +static long asfreq_HtoD(long fromDate, char relation, asfreq_info *af_info) + { return (fromDate - 1)/24 + HIGHFREQ_ORIG; } +static long asfreq_HtoA(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoA(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_HtoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_HtoM(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoM(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_HtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } +static long asfreq_HtoB(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static long asfreq_HtoB_forConvert(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoB_forConvert(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +// calculation works out the same as TtoS, so we just call that function for HtoT +static long asfreq_HtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_TtoS(fromDate, relation, &NULL_AF_INFO); } +static long asfreq_HtoS(long fromDate, char relation, asfreq_info *af_info) { + if (relation == 'B') { return fromDate*60*60 - 60*60 + 1; } + else { return fromDate*60*60; }} + +//************ FROM BUSINESS *************** + +static long asfreq_BtoD(long fromDate, char relation, asfreq_info *af_info) + { return ((fromDate-1)/5)*7 + (fromDate-1)%5 + 1; } + +static long asfreq_BtoA(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoA(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } + +static long asfreq_BtoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } + +static long asfreq_BtoM(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoM(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static long asfreq_BtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } + +static long asfreq_BtoH(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoH(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static long asfreq_BtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoT(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +static 
long asfreq_BtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoS(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +//************ FROM WEEKLY *************** + +static long asfreq_WtoD(long fromDate, char relation, asfreq_info *af_info) { + if (relation == 'B') { return fromDate * 7 - 6 + af_info->from_week_end;} + else { return fromDate * 7 + af_info->from_week_end; } +} + +static long asfreq_WtoA(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoA(asfreq_WtoD(fromDate, 'A', af_info), relation, af_info); } +static long asfreq_WtoQ(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoQ(asfreq_WtoD(fromDate, 'A', af_info), relation, af_info); } +static long asfreq_WtoM(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoM(asfreq_WtoD(fromDate, 'A', af_info), relation, &NULL_AF_INFO); } + +static long asfreq_WtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_WtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_WtoB(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_WtoD(fromDate, relation, af_info), + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } + else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } +} + +static long asfreq_WtoH(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoH(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_WtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoT(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_WtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoS(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } + +//************ FROM MONTHLY *************** + +static void MtoD_ym(long fromDate, long *y, long *m) { + *y = (fromDate - 1) / 12 + 1; + *m = fromDate - 12 * (*y) - 1; +} + +static long asfreq_MtoD(long fromDate, char relation, asfreq_info *af_info) { + + long y, m, absdate; + + if (relation == 'B') { + MtoD_ym(fromDate, &y, &m); + if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; + return absdate; + } else { + MtoD_ym(fromDate+1, &y, &m); + if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; + return absdate-1; + } +} + +static long asfreq_MtoA(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoA(asfreq_MtoD(fromDate, 'A', &NULL_AF_INFO), relation, af_info); } + +static long asfreq_MtoQ(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoQ(asfreq_MtoD(fromDate, 'A', &NULL_AF_INFO), relation, af_info); } + +static long asfreq_MtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } + +static long asfreq_MtoB(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } + else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } +} + +static long asfreq_MtoH(long fromDate, char relation, 
asfreq_info *af_info) + { return asfreq_DtoH(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_MtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoT(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } +static long asfreq_MtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoS(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } + +//************ FROM QUARTERLY *************** + +static void QtoD_ym(long fromDate, long *y, long *m, asfreq_info *af_info) { + + *y = (fromDate - 1) / 4 + 1; + *m = (fromDate + 4) * 3 - 12 * (*y) - 2; + + if (af_info->from_q_year_end != 12) { + *m += af_info->from_q_year_end; + if (*m > 12) { *m -= 12; } + else { *y -= 1; } + } +} + +static long asfreq_QtoD(long fromDate, char relation, asfreq_info *af_info) { + + long y, m, absdate; + + if (relation == 'B') { + QtoD_ym(fromDate, &y, &m, af_info); + if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; + return absdate; + } else { + QtoD_ym(fromDate+1, &y, &m, af_info); + if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; + return absdate - 1; + } +} + +static long asfreq_QtoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_QtoA(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoA(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_QtoM(long fromDate, char relation, asfreq_info *af_info) { + return asfreq_DtoM(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } + +static long asfreq_QtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_QtoB(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_QtoD(fromDate, relation, af_info), + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } + else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } +} + + +static long asfreq_QtoH(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoH(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_QtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoT(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_QtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoS(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } + + +//************ FROM ANNUAL *************** + +static long asfreq_AtoD(long fromDate, char relation, asfreq_info *af_info) { + long absdate, year, final_adj; + int month = (af_info->from_a_year_end + 1) % 12; + + if (relation == 'B') { + if (af_info->from_a_year_end == 12) {year = fromDate;} + else {year = fromDate - 1;} + final_adj = 0; + } else { + if (af_info->from_a_year_end == 12) {year = fromDate+1;} + else {year = fromDate;} + final_adj = -1; + } + absdate = absdate_from_ymd(year, month, 1); + if (absdate == INT_ERR_CODE) return INT_ERR_CODE; + return absdate + final_adj; +} + +static long asfreq_AtoA(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoA(asfreq_AtoD(fromDate, 
relation, af_info), relation, af_info); } + +static long asfreq_AtoQ(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoQ(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_AtoM(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoM(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_AtoW(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoW(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } + +static long asfreq_AtoB(long fromDate, char relation, asfreq_info *af_info) { + + struct date_info dinfo; + if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_AtoD(fromDate, relation, af_info), + GREGORIAN_CALENDAR)) return INT_ERR_CODE; + + if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } + else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } +} + +static long asfreq_AtoH(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoH(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_AtoT(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoT(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } +static long asfreq_AtoS(long fromDate, char relation, asfreq_info *af_info) + { return asfreq_DtoS(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } + +static long nofunc(long fromDate, char relation, asfreq_info *af_info) { return -1; } + +// end of frequency specific conversion routines + +// return a pointer to appropriate conversion function +long (*get_asfreq_func(int fromFreq, int toFreq, int forConvert))(long, char, asfreq_info*) { + + int fromGroup = get_freq_group(fromFreq); + int toGroup = get_freq_group(toFreq); + + if (fromGroup == FR_UND) { fromGroup = FR_DAY; } + + switch(fromGroup) + { + case FR_ANN: + switch(toGroup) + { + case FR_ANN: return &asfreq_AtoA; + case FR_QTR: return &asfreq_AtoQ; + case FR_MTH: return &asfreq_AtoM; + case FR_WK: return &asfreq_AtoW; + case FR_BUS: return &asfreq_AtoB; + case FR_DAY: return &asfreq_AtoD; + case FR_HR: return &asfreq_AtoH; + case FR_MIN: return &asfreq_AtoT; + case FR_SEC: return &asfreq_AtoS; + default: return &nofunc; + } + + case FR_QTR: + switch(toGroup) + { + case FR_ANN: return &asfreq_QtoA; + case FR_QTR: return &asfreq_QtoQ; + case FR_MTH: return &asfreq_QtoM; + case FR_WK: return &asfreq_QtoW; + case FR_BUS: return &asfreq_QtoB; + case FR_DAY: return &asfreq_QtoD; + case FR_HR: return &asfreq_QtoH; + case FR_MIN: return &asfreq_QtoT; + case FR_SEC: return &asfreq_QtoS; + default: return &nofunc; + } + + case FR_MTH: + switch(toGroup) + { + case FR_ANN: return &asfreq_MtoA; + case FR_QTR: return &asfreq_MtoQ; + case FR_WK: return &asfreq_MtoW; + case FR_BUS: return &asfreq_MtoB; + case FR_DAY: return &asfreq_MtoD; + case FR_HR: return &asfreq_MtoH; + case FR_MIN: return &asfreq_MtoT; + case FR_SEC: return &asfreq_MtoS; + default: return &nofunc; + } + + case FR_WK: + switch(toGroup) + { + case FR_ANN: return &asfreq_WtoA; + case FR_QTR: return &asfreq_WtoQ; + case FR_MTH: return &asfreq_WtoM; + case FR_WK: return &asfreq_WtoW; + case FR_BUS: return &asfreq_WtoB; + case FR_DAY: return &asfreq_WtoD; + case FR_HR: return &asfreq_WtoH; + case FR_MIN: return &asfreq_WtoT; + case FR_SEC: return &asfreq_WtoS; + default: return &nofunc; + } + + case FR_BUS: + switch(toGroup) + { + case FR_ANN: return &asfreq_BtoA; + case FR_QTR: return &asfreq_BtoQ; 
+ case FR_MTH: return &asfreq_BtoM; + case FR_WK: return &asfreq_BtoW; + case FR_DAY: return &asfreq_BtoD; + case FR_HR: return &asfreq_BtoH; + case FR_MIN: return &asfreq_BtoT; + case FR_SEC: return &asfreq_BtoS; + default: return &nofunc; + } + + case FR_DAY: + switch(toGroup) + { + case FR_ANN: return &asfreq_DtoA; + case FR_QTR: return &asfreq_DtoQ; + case FR_MTH: return &asfreq_DtoM; + case FR_WK: return &asfreq_DtoW; + case FR_BUS: + if (forConvert) { return &asfreq_DtoB_forConvert; } + else { return &asfreq_DtoB; } + case FR_DAY: return &asfreq_DtoD; + case FR_HR: return &asfreq_DtoH; + case FR_MIN: return &asfreq_DtoT; + case FR_SEC: return &asfreq_DtoS; + default: return &nofunc; + } + + case FR_HR: + switch(toGroup) + { + case FR_ANN: return &asfreq_HtoA; + case FR_QTR: return &asfreq_HtoQ; + case FR_MTH: return &asfreq_HtoM; + case FR_WK: return &asfreq_HtoW; + case FR_BUS: + if (forConvert) { return &asfreq_HtoB_forConvert; } + else { return &asfreq_HtoB; } + case FR_DAY: return &asfreq_HtoD; + case FR_MIN: return &asfreq_HtoT; + case FR_SEC: return &asfreq_HtoS; + default: return &nofunc; + } + + case FR_MIN: + switch(toGroup) + { + case FR_ANN: return &asfreq_TtoA; + case FR_QTR: return &asfreq_TtoQ; + case FR_MTH: return &asfreq_TtoM; + case FR_WK: return &asfreq_TtoW; + case FR_BUS: + if (forConvert) { return &asfreq_TtoB_forConvert; } + else { return &asfreq_TtoB; } + case FR_DAY: return &asfreq_TtoD; + case FR_HR: return &asfreq_TtoH; + case FR_SEC: return &asfreq_TtoS; + default: return &nofunc; + } + + case FR_SEC: + switch(toGroup) + { + case FR_ANN: return &asfreq_StoA; + case FR_QTR: return &asfreq_StoQ; + case FR_MTH: return &asfreq_StoM; + case FR_WK: return &asfreq_StoW; + case FR_BUS: + if (forConvert) { return &asfreq_StoB_forConvert; } + else { return &asfreq_StoB; } + case FR_DAY: return &asfreq_StoD; + case FR_HR: return &asfreq_StoH; + case FR_MIN: return &asfreq_StoT; + default: return &nofunc; + } + default: return &nofunc; + } +} + +static int calc_a_year_end(int freq, int group) { + int result = (freq - group) % 12; + if (result == 0) {return 12;} + else {return result;} +} + +static int calc_week_end(int freq, int group) { + return freq - group; +} + +void get_asfreq_info(int fromFreq, int toFreq, asfreq_info *af_info) { + + int fromGroup = get_freq_group(fromFreq); + int toGroup = get_freq_group(toFreq); + + switch(fromGroup) + { + case FR_WK: { + af_info->from_week_end = calc_week_end(fromFreq, fromGroup); + } break; + case FR_ANN: { + af_info->from_a_year_end = calc_a_year_end(fromFreq, fromGroup); + } break; + case FR_QTR: { + af_info->from_q_year_end = calc_a_year_end(fromFreq, fromGroup); + } break; + + } + + switch(toGroup) + { + case FR_WK: { + af_info->to_week_end = calc_week_end(toFreq, toGroup); + } break; + case FR_ANN: { + af_info->to_a_year_end = calc_a_year_end(toFreq, toGroup); + } break; + case FR_QTR: { + af_info->to_q_year_end = calc_a_year_end(toFreq, toGroup); + } break; + } + +} + +static double getAbsTime(int freq, long dailyDate, long originalDate) { + + long startOfDay, periodsPerDay; + + switch(freq) + { + case FR_HR: + periodsPerDay = 24; + break; + case FR_MIN: + periodsPerDay = 24*60; + break; + case FR_SEC: + periodsPerDay = 24*60*60; + break; + default: + return 0; + } + + startOfDay = asfreq_DtoHIGHFREQ(dailyDate, 'B', periodsPerDay); + return (24*60*60)*((double)(originalDate - startOfDay))/((double)periodsPerDay); +} + +/************************************************************ +** Date type definition 
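A minimal sketch of how the integer frequency constants from c_tdates.h decompose into a group plus a modifier; the underscored helpers are local copies of get_freq_group and calc_a_year_end as defined above, and the two constants used are FR_ANNJUN and FR_WKWED.

#include <stdio.h>

static int get_freq_group_(int freq) { return (freq / 1000) * 1000; }

static int calc_a_year_end_(int freq, int group) {
    int result = (freq - group) % 12;
    return (result == 0) ? 12 : result;     /* 0 means a December year end */
}

int main(void) {
    int fr_annjun = 1006;                   /* FR_ANNJUN: annual, June year end */
    int fr_wkwed  = 4003;                   /* FR_WKWED: weekly, Wednesday week end */
    int group;

    group = get_freq_group_(fr_annjun);     /* 1000 == FR_ANN */
    printf("group %d, year ends in month %d\n", group, calc_a_year_end_(fr_annjun, group));

    group = get_freq_group_(fr_wkwed);      /* 4000 == FR_WK */
    printf("group %d, week ends on day %d\n", group, fr_wkwed - group);  /* 3 == Wednesday */
    return 0;
}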
+************************************************************/ + +typedef struct { + PyObject_HEAD + int freq; /* frequency of date */ + int value; /* integer representation of date */ + PyObject* cached_vals; +} DateObject; + +/* Forward declarations */ +static PyTypeObject DateType; +#define DateObject_Check(op) PyObject_TypeCheck(op, &DateType) + +static void +DateObject_dealloc(DateObject* self) { + Py_XDECREF(self->cached_vals); + self->ob_type->tp_free((PyObject*)self); +} + + +static PyObject *freq_dict, *freq_dict_rev, *freq_constants; + +#define DICT_SETINT_STRKEY(dict, key, val) \ + {PyObject *pyval = PyInt_FromLong(val); \ + PyDict_SetItemString(dict, key, pyval); \ + Py_DECREF(pyval); } + +#define ADD_FREQ_CONSTANT(const_name, val) \ + DICT_SETINT_STRKEY(freq_constants, const_name, val) + +#define INIT_FREQ(const_name, key, aliases) \ + {PyObject *pykey = PyInt_FromLong(key); \ + PyDict_SetItem(freq_dict, pykey, aliases); \ + PyDict_SetItemString(freq_constants, const_name, pykey); \ + Py_DECREF(pykey); \ + Py_DECREF(aliases); } + + +static int init_freq_group(int num_items, int num_roots, int base_const, + char item_abbrevs[][2][10], char group_prefixes[][15], + char item_const_names[][15]) { + + int i; + + for (i = 0; i < num_items; i++) { + + PyObject *aliases; + int j, size, k; + + if (i == 0) { k = 3; } else { k = 2; } + + size = num_roots * k; + + aliases = PyTuple_New(size); + + for (j = 0; j < num_roots; j++) { + PyObject *alias_v1, *alias_v2; + char *root, *alt; + + if ((root = malloc((30) * sizeof(char))) == NULL) return INT_ERR_CODE; + if ((alt = malloc((30) * sizeof(char))) == NULL) return INT_ERR_CODE; + + strcpy(root, group_prefixes[j]); + strcpy(alt, group_prefixes[j]); + + if (i == 0) { + PyObject *alias = PyString_FromString(root); + PyTuple_SET_ITEM(aliases, j*k + 2, alias); + } + + strcat(root, "-"); + strcat(root, item_abbrevs[i][0]); + strcat(alt, "-"); + strcat(alt, item_abbrevs[i][1]); + + alias_v1 = PyString_FromString(root); + alias_v2 = PyString_FromString(alt); + + free(root); + free(alt); + + PyTuple_SET_ITEM(aliases, j*k, alias_v1); + PyTuple_SET_ITEM(aliases, j*k + 1, alias_v2); + } + + INIT_FREQ(item_const_names[i], base_const+i, aliases); + } + + return 0; +} + +/* take a dictionary with integer keys and tuples of strings for values, + and populate a dictionary with all the strings as keys and integers + for values */ +static int reverse_dict(PyObject *source, PyObject *dest) { + + PyObject *key, *value; + + Py_ssize_t pos = 0; + + while (PyDict_Next(source, &pos, &key, &value)) { + PyObject *tuple_iter; + PyObject *item; + + if((tuple_iter = PyObject_GetIter(value)) == NULL) return INT_ERR_CODE; + + while ((item = PyIter_Next(tuple_iter)) != NULL) { + PyDict_SetItem(dest, item, key); + Py_DECREF(item); + } + Py_DECREF(tuple_iter); + } + return 0; +} + +static int build_freq_dict(void) { + + char ANN_prefixes[8][15] = { "A", "Y", "ANN", "ANNUAL", "ANNUALLY", + "YR", "YEAR", "YEARLY" }; + + char QTRE_prefixes[8][15] = { "Q", "QTR", "QUARTER", "QUARTERLY", "Q-E", + "QTR-E", "QUARTER-E", "QUARTERLY-E"}; + char QTRS_prefixes[4][15] = { "Q-S", "QTR-S", "QUARTER-S", "QUARTERLY-S" }; + + char WK_prefixes[4][15] = { "W", "WK", "WEEK", "WEEKLY" }; + + /* Note: order of this array must match up with how the Annual + frequency constants are lined up */ + char month_names[12][2][10] = { + { "DEC", "DECEMBER" }, + { "JAN", "JANUARY" }, + { "FEB", "FEBRUARY" }, + { "MAR", "MARCH" }, + { "APR", "APRIL" }, + { "MAY", "MAY" }, + { "JUN", "JUNE" }, + { "JUL", 
"JULY" }, + { "AUG", "AUGUST" }, + { "SEP", "SEPTEMBER" }, + { "OCT", "OCTOBER" }, + { "NOV", "NOVEMBER" }}; + + char day_names[7][2][10] = { + { "SUN", "SUNDAY" }, + { "MON", "MONDAY" }, + { "TUE", "TUESDAY" }, + { "WED", "WEDNESDAY" }, + { "THU", "THURSDAY" }, + { "FRI", "FRIDAY" }, + { "SAT", "SATURDAY" }}; + + char ANN_const_names[12][15] = { + "FR_ANNDEC", + "FR_ANNJAN", + "FR_ANNFEB", + "FR_ANNMAR", + "FR_ANNAPR", + "FR_ANNMAY", + "FR_ANNJUN", + "FR_ANNJUL", + "FR_ANNAUG", + "FR_ANNSEP", + "FR_ANNOCT", + "FR_ANNNOV"}; + + char QTRE_const_names[12][15] = { + "FR_QTREDEC", + "FR_QTREJAN", + "FR_QTREFEB", + "FR_QTREMAR", + "FR_QTREAPR", + "FR_QTREMAY", + "FR_QTREJUN", + "FR_QTREJUL", + "FR_QTREAUG", + "FR_QTRESEP", + "FR_QTREOCT", + "FR_QTRENOV"}; + + char QTRS_const_names[12][15] = { + "FR_QTRSDEC", + "FR_QTRSJAN", + "FR_QTRSFEB", + "FR_QTRSMAR", + "FR_QTRSAPR", + "FR_QTRSMAY", + "FR_QTRSJUN", + "FR_QTRSJUL", + "FR_QTRSAUG", + "FR_QTRSSEP", + "FR_QTRSOCT", + "FR_QTRSNOV"}; + + char WK_const_names[7][15] = { + "FR_WKSUN", + "FR_WKMON", + "FR_WKTUE", + "FR_WKWED", + "FR_WKTHU", + "FR_WKFRI", + "FR_WKSAT"}; + + PyObject *aliases; + + freq_dict = PyDict_New(); + freq_dict_rev = PyDict_New(); + freq_constants = PyDict_New(); + + aliases = Py_BuildValue("(ssss)", "M", "MTH", "MONTH", "MONTHLY"); + INIT_FREQ("FR_MTH", FR_MTH, aliases); + + aliases = Py_BuildValue("(ssss)", "B", "BUS", "BUSINESS", "BUSINESSLY"); + INIT_FREQ("FR_BUS", FR_BUS, aliases); + + aliases = Py_BuildValue("(ssss)", "D", "DAY", "DLY", "DAILY"); + INIT_FREQ("FR_DAY", FR_DAY, aliases); + + aliases = Py_BuildValue("(sssss)", "H", "HR", "HOUR", "HRLY", "HOURLY"); + INIT_FREQ("FR_HR", FR_HR, aliases); + + aliases = Py_BuildValue("(ssss)", "T", "MIN", "MINUTE", "MINUTELY"); + INIT_FREQ("FR_MIN", FR_MIN, aliases); + + aliases = Py_BuildValue("(ssss)", "S", "SEC", "SECOND", "SECONDLY"); + INIT_FREQ("FR_SEC", FR_SEC, aliases); + + aliases = Py_BuildValue("(ssss)", "U", "UND", "UNDEF", "UNDEFINED"); + INIT_FREQ("FR_UND", FR_UND, aliases); + + ADD_FREQ_CONSTANT("FR_ANN", FR_ANN); + + if(init_freq_group(12, 8, FR_ANN, + month_names, ANN_prefixes, ANN_const_names) == INT_ERR_CODE) { + return INT_ERR_CODE; + } + + ADD_FREQ_CONSTANT("FR_QTR", FR_QTR); + + if(init_freq_group(12, 8, FR_QTREDEC, + month_names, QTRE_prefixes, QTRE_const_names) == INT_ERR_CODE) { + return INT_ERR_CODE; + } + + if(init_freq_group(12, 4, FR_QTRSDEC, + month_names, QTRS_prefixes, QTRS_const_names) == INT_ERR_CODE) { + return INT_ERR_CODE; + } + + ADD_FREQ_CONSTANT("FR_WK", FR_WK); + + if(init_freq_group(7, 4, FR_WK, + day_names, WK_prefixes, WK_const_names) == INT_ERR_CODE) { + return INT_ERR_CODE; + } + + if(reverse_dict(freq_dict, freq_dict_rev) == INT_ERR_CODE) { + return INT_ERR_CODE; + } + + return 0; +} + + +/* take user specified frequency and convert to int representation + of the frequency */ +int check_freq(PyObject *freq_spec) { + + if (PyInt_Check(freq_spec)) { + return (int)PyInt_AsLong(freq_spec); + } else if (PyString_Check(freq_spec)) { + char *freq_str, *freq_str_uc; + PyObject *freq_val; + + freq_str = PyString_AsString(freq_spec); + if((freq_str_uc = str_uppercase(freq_str)) == NULL) {return INT_ERR_CODE;} + + freq_val = PyDict_GetItemString(freq_dict_rev, freq_str_uc); + + free(freq_str_uc); + + if (freq_val == NULL) { + PyErr_SetString(PyExc_ValueError, "invalid frequency specification"); + return INT_ERR_CODE; + } else { + int ret_val = (int)PyInt_AsLong(freq_val); + return ret_val; + } + } else if (freq_spec == Py_None) { + return 
FR_UND; + } else { + int retval = (int)PyInt_AsLong(freq_spec); + if (PyErr_Occurred()) { + PyErr_SetString(PyExc_ValueError, "invalid frequency specification"); + return INT_ERR_CODE; + } else { return retval; } + } + +} + +static PyObject * +DateObject_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { + + DateObject *self; + + self = (DateObject*)type->tp_alloc(type, 0); + if (self != NULL) { + // initialize attributes that need initializing in here + self->freq = FR_UND; + self->value = -1; + } + + return (PyObject *)self; +} + +/* for use in C code */ +static DateObject * +DateObject_New(void) { + PyObject *dummy; + return (DateObject*)DateObject_new(&DateType, dummy, dummy); +} + +#define INIT_ERR(errortype, errmsg) PyErr_SetString(errortype,errmsg);return -1 + +static int +DateObject_init(DateObject *self, PyObject *args, PyObject *kwds) { + + PyObject *freq=NULL, *value=NULL, *datetime=NULL, *string=NULL; + char *INSUFFICIENT_MSG = "insufficient parameters to initialize Date"; + + int def_info=INT_ERR_CODE; + + int year=def_info, month=def_info, day=def_info, quarter=def_info, + hour=def_info, minute=def_info, second=def_info; + + int free_dt=0; + + static char *kwlist[] = {"freq", "value", "string", + "year", "month", "day", "quarter", + "hour", "minute", "second", + "datetime", NULL}; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "O|OOiiiiiiiO", kwlist, + &freq, &value, &string, + &year, &month, &day, &quarter, + &hour, &minute, &second, + &datetime)) return -1; + + if (PyObject_HasAttrString(freq, "freq")) { + PyObject *freq_attr = PyObject_GetAttrString(freq, "freq"); + self->freq = PyInt_AS_LONG(freq_attr); + Py_DECREF(freq_attr); + } else { + if((self->freq = check_freq(freq)) == INT_ERR_CODE) return -1; + } + + if ((value && PyString_Check(value)) || string) { + + PyObject *string_arg = PyTuple_New(1); + int freq_group = get_freq_group(self->freq); + + free_dt = 1; + + if (!string) { + string = value; + } + + PyTuple_SET_ITEM(string_arg, 0, string); + Py_INCREF(string); + + if (freq_group == FR_HR || + freq_group == FR_MIN || + freq_group == FR_SEC) + { datetime = PyEval_CallObject(DateTimeFromString, string_arg); } + else { datetime = PyEval_CallObject(DateFromString, string_arg); } + + Py_DECREF(string_arg); + + value = NULL; + } + + if (value) { + self->value = PyInt_AsLong(value); + } else { + + int freq_group = get_freq_group(self->freq); + + if (datetime) { + year=PyDateTime_GET_YEAR(datetime); + month=PyDateTime_GET_MONTH(datetime); + day=PyDateTime_GET_DAY(datetime); + hour=PyDateTime_DATE_GET_HOUR(datetime); + minute=PyDateTime_DATE_GET_MINUTE(datetime); + second=PyDateTime_DATE_GET_SECOND(datetime); + } + + if (!datetime) { + + // First, some basic checks..... 
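A minimal sketch of two of the serial-number encodings that DateObject_init assigns further below: monthly dates are stored as a 1-based count of months starting at January of year 1, and annual dates store the year itself. The date 2007-01 is only an illustration.

#include <stdio.h>

int main(void) {
    int year = 2007, month = 1;

    int monthly_value = (year - 1) * 12 + month;   /* same formula as the FR_MTH branch below */
    int annual_value  = year;                      /* the FR_ANN branch stores the year directly */

    printf("FR_MTH value for 2007-01: %d\n", monthly_value);  /* 24073 */
    printf("FR_ANN value for 2007   : %d\n", annual_value);   /* 2007 */
    return 0;
}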
+ if (year == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + if (self->freq == FR_BUS || + self->freq == FR_DAY || + self->freq == FR_WK || + self->freq == FR_UND) { + if (month == def_info || day == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + + // if FR_BUS, check for week day + + } else if (self->freq == FR_MTH) { + if (month == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + } else if (freq_group == FR_QTR) { + if (quarter == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + } else if (self->freq == FR_SEC) { + if (month == def_info || + day == def_info || + second == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + if (hour == def_info) { + hour = second/3600; + minute = (second % 3600)/60; + second = second % 60; + } else if (minute == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + } else if (self->freq == FR_MIN) { + if (month == def_info || + day == def_info || + minute == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + if (hour == def_info) { + hour = minute/60; + minute = minute % 60; + } + } else if (self->freq == FR_HR) { + if (month == def_info || + day == def_info || + hour == def_info) { + INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); + } + } + + } + + if (self->freq == FR_SEC) { + long absdays, delta; + absdays = absdate_from_ymd(year, month, day); + delta = (absdays - HIGHFREQ_ORIG); + self->value = (int)(delta*86400 + hour*3600 + minute*60 + second + 1); + } else if (self->freq == FR_MIN) { + long absdays, delta; + absdays = absdate_from_ymd(year, month, day); + delta = (absdays - HIGHFREQ_ORIG); + self->value = (int)(delta*1440 + hour*60 + minute + 1); + } else if (self->freq == FR_HR) { + long absdays, delta; + if((absdays = absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; + delta = (absdays - HIGHFREQ_ORIG); + self->value = (int)(delta*24 + hour + 1); + } else if (self->freq == FR_DAY) { + if((self->value = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; + } else if (self->freq == FR_UND) { + if((self->value = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; + } else if (self->freq == FR_BUS) { + long weeks, days; + if((days = absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; + weeks = days/7; + self->value = (int)(days - weeks*2); + } else if (freq_group == FR_WK) { + int adj_ordinal, ordinal, day_adj; + if((ordinal = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; + day_adj = (7 - (self->freq - FR_WK)) % 7; + adj_ordinal = ordinal + ((7 - day_adj) - ordinal % 7) % 7; + self->value = adj_ordinal/7; + } else if (self->freq == FR_MTH) { + self->value = (year-1)*12 + month; + } else if (freq_group == FR_QTR) { + if ((self->freq - freq_group) > 12) { + // quarterly frequency with year determined by ending period + self->value = year*4 + quarter; + } else { + /* quarterly frequency with year determined by ending period + or has December year end*/ + self->value = (year-1)*4 + quarter; + } + } else if (freq_group == FR_ANN) { + self->value = year; + } + + } + + if (free_dt) { Py_DECREF(datetime); } + + return 0; +} + +static PyMemberDef DateObject_members[] = { + {"freq", T_INT, offsetof(DateObject, freq), 0, + "frequency"}, + {"value", T_INT, offsetof(DateObject, value), 0, + "integer representation of the Date"}, + {NULL} /* Sentinel */ +}; + +static char DateObject_toordinal_doc[] = +"Return the proleptic Gregorian ordinal of the date, where January 1 
of\n" +"year 1 has ordinal 1"; +static PyObject * +DateObject_toordinal(DateObject* self) +{ + if (self->freq == FR_DAY) { + return PyInt_FromLong(self->value); + } else { + long (*toDaily)(long, char, asfreq_info*) = NULL; + asfreq_info af_info; + + toDaily = get_asfreq_func(self->freq, FR_DAY, 0); + get_asfreq_info(self->freq, FR_DAY, &af_info); + + return PyInt_FromLong(toDaily(self->value, 'A', &af_info)); + } +} + +static char DateObject_asfreq_doc[] = +"Returns a date converted to a specified frequency.\n\n" +":Parameters:\n" +" - freq : string/int\n" +" Frequency to convert the Date to. Accepts any valid frequency\n" +" specification (string or integer)\n" +" - relation :string *['After']*\n" +" Applies only when converting a lower frequency Date to a higher\n" +" frequency Date, or when converting a weekend Date to a business\n" +" frequency Date. Valid values are 'before', 'after', 'b', and 'a'."; +static PyObject * +DateObject_asfreq(DateObject *self, PyObject *args, PyObject *kwds) +{ + + PyObject *freq=NULL; + char *relation_raw=NULL; + char *relation_uc; + char relation; + int invalid_relation=0; + int toFreq; + int result_val; + DateObject *result = DateObject_New(); + + static char *kwlist[] = {"freq", "relation", NULL}; + + long (*asfreq_func)(long, char, asfreq_info*) = NULL; + asfreq_info af_info; + + if (! PyArg_ParseTupleAndKeywords(args, kwds, "O|s", kwlist, + &freq, &relation_raw)) return NULL; + + if(relation_raw) { + if (strlen(relation_raw) > 0) { + if((relation_uc = str_uppercase(relation_raw)) == NULL) + {return PyErr_NoMemory();} + + if (strcmp(relation_uc, "BEFORE") == 0 || + strcmp(relation_uc, "B") == 0 || + strcmp(relation_uc, "AFTER") == 0 || + strcmp(relation_uc, "A") == 0) { + relation = relation_uc[0]; + } else { invalid_relation=1; } + } else { + invalid_relation=1; + } + + if (invalid_relation) { + PyErr_SetString(PyExc_ValueError,"Invalid relation specification"); + return NULL; + } + } else { + relation = 'A'; + } + + if ((toFreq = check_freq(freq)) == INT_ERR_CODE) return NULL; + + get_asfreq_info(self->freq, toFreq, &af_info); + asfreq_func = get_asfreq_func(self->freq, toFreq, 0); + + result_val = asfreq_func(self->value, relation, &af_info); + + result->freq = toFreq; + result->value = result_val; + + return (PyObject*)result; + +} + +static char DateObject_strfmt_doc[] = +"Returns string representation of Date object according to format specified.\n\n" +":Parameters:\n" +" - fmt : string\n" +" Formatting string. Uses the same directives as in the time.strftime\n" +" function in the standard Python time module. In addition, a few other\n" +" directives are supported:\n" +" %q - the 'quarter' of the date\n" +" %f - Year without century as a decimal number [00,99]. The\n" +" 'year' in this case is the year of the date determined by\n" +" the year for the current quarter. This is the same as %y\n" +" unless the Date is one of the 'qtr-s' frequencies\n" +" %F - Year with century as a decimal number. The 'year' in this\n" +" case is the year of the date determined by the year for\n" +" the current quarter. 
This is the same as %Y unless the\n" +" Date is one of the 'qtr-s' frequencies\n"; +static PyObject * +DateObject_strfmt(DateObject *self, PyObject *args) +{ + + char *orig_fmt_str, *fmt_str; + char *result; + + int num_extra_fmts = 3; + + char extra_fmts[3][2][10] = {{"%q", "^`AB`^"}, + {"%f", "^`CD`^"}, + {"%F", "^`EF`^"}}; + + int extra_fmts_found[3] = {0,0,0}; + int extra_fmts_found_one = 0; + struct tm c_date; + struct date_info tempDate; + long absdate; + double abstime; + int i, result_len; + PyObject *py_result; + + long (*toDaily)(long, char, asfreq_info*) = NULL; + asfreq_info af_info; + + if (!PyArg_ParseTuple(args, "s:strfmt(fmt)", &orig_fmt_str)) return NULL; + + toDaily = get_asfreq_func(self->freq, FR_DAY, 0); + get_asfreq_info(self->freq, FR_DAY, &af_info); + + absdate = toDaily(self->value, 'A', &af_info); + abstime = getAbsTime(self->freq, absdate, self->value); + + if(dInfoCalc_SetFromAbsDateTime(&tempDate, absdate, abstime, + GREGORIAN_CALENDAR)) return NULL; + + // populate standard C date struct with info from our date_info struct + c_date.tm_sec = (int)tempDate.second; + c_date.tm_min = tempDate.minute; + c_date.tm_hour = tempDate.hour; + c_date.tm_mday = tempDate.day; + c_date.tm_mon = tempDate.month - 1; + c_date.tm_year = tempDate.year - 1900; + c_date.tm_wday = tempDate.day_of_week; + c_date.tm_yday = tempDate.day_of_year; + c_date.tm_isdst = -1; + + result_len = strlen(orig_fmt_str) + 50; + if ((result = malloc(result_len * sizeof(char))) == NULL) {return PyErr_NoMemory();} + + fmt_str = orig_fmt_str; + + // replace any special format characters with their place holder + for(i=0; i < num_extra_fmts; i++) { + char *special_loc; + if ((special_loc = strstr(fmt_str,extra_fmts[i][0])) != NULL) { + char *tmp_str = fmt_str; + fmt_str = str_replace(fmt_str, extra_fmts[i][0], + extra_fmts[i][1]); + /* only free the previous loop value if this is not the first + special format string found */ + if (extra_fmts_found_one) { free(tmp_str); } + + if (fmt_str == NULL) {return NULL;} + + extra_fmts_found[i] = 1; + extra_fmts_found_one = 1; + } + } + + strftime(result, result_len, fmt_str, &c_date); + if (extra_fmts_found_one) { free(fmt_str); } + + // replace any place holders with the appropriate value + for(i=0; i < num_extra_fmts; i++) { + if (extra_fmts_found[i]) { + char *tmp_str = result; + char *extra_str; + + if (strcmp(extra_fmts[i][0], "%q") == 0 || + strcmp(extra_fmts[i][0], "%f") == 0 || + strcmp(extra_fmts[i][0], "%F") == 0) { + + asfreq_info af_info; + int qtr_freq, year, quarter, year_len; + + if (get_freq_group(self->freq) == FR_QTR) { + qtr_freq = self->freq; + } else { qtr_freq = FR_QTR; } + get_asfreq_info(FR_DAY, qtr_freq, &af_info); + + if(DtoQ_yq(absdate, &af_info, &year, &quarter) == INT_ERR_CODE) + { return NULL; } + + if(strcmp(extra_fmts[i][0], "%q") == 0) { + if ((extra_str = malloc(2 * sizeof(char))) == NULL) { + free(tmp_str); + return PyErr_NoMemory(); + } + sprintf(extra_str, "%i", quarter); + } else { + if ((qtr_freq % 1000) > 12) { year -= 1; } + + if (strcmp(extra_fmts[i][0], "%f") == 0) { + year_len = 2; + year = year % 100; + } else { year_len = 4; } + + if ((extra_str = malloc((year_len+1) * sizeof(char))) == NULL) { + free(tmp_str); + return PyErr_NoMemory(); + } + + if (year_len == 2 && year < 10) { + sprintf(extra_str, "0%i", year); + } else { sprintf(extra_str, "%i", year); } + } + + } else { + PyErr_SetString(PyExc_RuntimeError,"Unrecogized fmt string"); + return NULL; + } + + result = str_replace(result, extra_fmts[i][1], extra_str); 
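The substitution above is how strfmt supports directives the C library's strftime does not know about: %q, %f and %F are first swapped for placeholder tokens, the string is run through strftime, and the placeholders are then replaced with the computed quarter and quarter-year values. A minimal stand-alone sketch of the same technique (plain Python, illustrative only, not the module API):

    import time

    def strfmt_sketch(fmt, tm, quarter, qyear):
        # swap the custom directives for tokens strftime passes through untouched
        placeholders = {"%q": "^`AB`^", "%f": "^`CD`^", "%F": "^`EF`^"}
        values = {"%q": str(quarter),
                  "%f": "%02i" % (qyear % 100),
                  "%F": str(qyear)}
        for directive, token in placeholders.items():
            fmt = fmt.replace(directive, token)
        out = time.strftime(fmt, tm)
        for directive, token in placeholders.items():
            out = out.replace(token, values[directive])
        return out

    strfmt_sketch("%FQ%q", time.localtime(), quarter=2, qyear=2007)   # -> '2007Q2'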
+ free(tmp_str); + free(extra_str); + if (result == NULL) { return NULL; } + } + } + + py_result = PyString_FromString(result); + free(result); + + return py_result; +} + +static PyObject * +DateObject___str__(DateObject* self) +{ + + int freq_group = get_freq_group(self->freq); + PyObject *string_arg, *retval; + + string_arg = NULL; + if (freq_group == FR_ANN) { string_arg = Py_BuildValue("(s)", "%Y"); } + else if (freq_group == FR_QTR) { string_arg = Py_BuildValue("(s)", "%FQ%q"); } + else if (freq_group == FR_MTH) { string_arg = Py_BuildValue("(s)", "%b-%Y"); } + else if (freq_group == FR_DAY || + freq_group == FR_BUS || + freq_group == FR_WK || + freq_group == FR_UND) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y"); } + else if (freq_group == FR_HR) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:00"); } + else if (freq_group == FR_MIN) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:%M"); } + else if (freq_group == FR_SEC) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:%M:%S"); } + + if (string_arg == NULL) { return NULL; } + + retval = DateObject_strfmt(self, string_arg); + Py_DECREF(string_arg); + + return retval; +} + +static PyObject * +DateObject_freqstr(DateObject *self, void *closure) { + PyObject *key = PyInt_FromLong(self->freq); + PyObject *freq_aliases = PyDict_GetItem(freq_dict, key); + PyObject *main_alias = PyTuple_GET_ITEM(freq_aliases, 0); + Py_DECREF(key); + Py_INCREF(main_alias); + return main_alias; +} + + +static PyObject * +DateObject___repr__(DateObject* self) +{ + PyObject *py_str_rep, *py_freqstr, *py_repr; + char *str_rep, *freqstr, *repr; + int repr_len; + + py_str_rep = DateObject___str__(self); + if (py_str_rep == NULL) { return NULL; } + + py_freqstr = DateObject_freqstr(self, NULL); + + str_rep = PyString_AsString(py_str_rep); + freqstr = PyString_AsString(py_freqstr); + + repr_len = strlen(str_rep) + strlen(freqstr) + 6; + + if((repr = malloc((repr_len + 1) * sizeof(char))) == NULL) + { return PyErr_NoMemory(); } + + strcpy(repr, "<"); + strcat(repr, freqstr); + strcat(repr, " : "); + strcat(repr, str_rep); + strcat(repr, ">"); + + py_repr = PyString_FromString(repr); + + Py_DECREF(py_str_rep); + Py_DECREF(py_freqstr); + + free(repr); + + return py_repr; +} + +/****************************** + These methods seem rather useless. May or may not implement them. 
+fromordinal(self, ordinal): + return Date(self.freq, datetime=dt.datetime.fromordinal(ordinal)) +tostring(self): + return str(self) +toobject(self): + return self +isvalid(self): + return True +*******************************/ + + +static DateObject * +DateObject_FromFreqAndValue(int freq, int value) { + + DateObject *result = DateObject_New(); + + PyObject *args = PyTuple_New(0); + PyObject *kw = PyDict_New(); + PyObject *py_freq = PyInt_FromLong(freq); + PyObject *py_value = PyInt_FromLong(value); + + PyDict_SetItemString(kw, "freq", py_freq); + PyDict_SetItemString(kw, "value", py_value); + + Py_DECREF(py_freq); + Py_DECREF(py_value); + + DateObject_init(result, args, kw); + + Py_DECREF(args); + Py_DECREF(kw); + + return result; +} + +static PyObject * +DateObject_date_plus_int(PyObject *date, PyObject *pyint) { + DateObject *dateobj = (DateObject*)date; + if (DateObject_Check(pyint)) { + PyErr_SetString(PyExc_TypeError, "Cannot add two Date objects"); + return NULL; + } + + return (PyObject*)DateObject_FromFreqAndValue(dateobj->freq, PyInt_AsLong(pyint) + dateobj->value); +} + +static PyObject * +DateObject___add__(PyObject *left, PyObject *right) +{ + if (DateObject_Check(left)) { + return DateObject_date_plus_int(left, right); + } else { + return DateObject_date_plus_int(right, left); + } +} + +static PyObject * +DateObject___subtract__(PyObject *left, PyObject *right) +{ + int result; + DateObject *dleft; + if (!DateObject_Check(left)) { + PyErr_SetString(PyExc_ValueError, "Cannot subtract Date from non-Date value"); + return NULL; + } + + dleft = (DateObject*)left; + + if (DateObject_Check(right)) { + DateObject *dright = (DateObject*)right; + if (dleft->freq != dright->freq) { + PyErr_SetString(PyExc_ValueError, "Cannot subtract Dates with different frequency"); + return NULL; + } + result = dleft->value - dright->value; + return PyInt_FromLong(result); + } else { + result = dleft->value - PyInt_AsLong(right); + return (PyObject*)DateObject_FromFreqAndValue(dleft->freq, result); + } +} + +static int +DateObject___compare__(DateObject * obj1, DateObject * obj2) +{ + if (obj1->freq != obj2->freq) { + PyErr_SetString(PyExc_ValueError, + "Cannot compare dates with different frequency"); + return -1; + } + + if (obj1->value < obj2->value) return -1; + if (obj1->value > obj2->value) return 1; + if (obj1->value == obj2->value) return 0; + return -1; +} + +static long +DateObject___hash__(DateObject *self) +{ + register int freq_group = get_freq_group(self->freq); + + /* within a given frequency, hash values are guaranteed to be unique + for different dates. 
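The arithmetic defined above makes a Date behave like an integer offset at its frequency: Date + int shifts the date, Date - Date (same frequency only) yields an integer span, Date - int yields a shifted Date, and adding two Dates raises TypeError. A doctest-style illustration (hedged: it assumes the module is imported as ts, as in the class docstring further below, and that 'M' is the monthly frequency alias):

    >>> d = ts.Date('M', year=2007, month=5)
    >>> (d + 3) - d                      # Date - Date with matching frequency
    3
    >>> (d - 4).value == d.value - 4     # Date - int gives a shifted Date
    True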
For different frequencies, we make a reasonable + effort to ensure hash values will be unique, but it is not guaranteed */ + if (freq_group == FR_BUS) { + return self->value + 10000000; + } else if (freq_group == FR_WK) { + return self->value + 100000000; + } else { return self->value; } +} + +static PyObject * +DateObject___int__(DateObject *self) +{ + return PyInt_FromLong(self->value); +} + +static PyObject * +DateObject___float__(DateObject *self) +{ + return PyFloat_FromDouble((double)(self->value)); +} + +/*************************************************** + ====== Date Properties ====== +****************************************************/ + +// helper function for date property funcs +static int +DateObject_set_date_info(DateObject *self, struct date_info *dinfo) { + PyObject *daily_obj = DateObject_toordinal(self); + long absdate = PyInt_AsLong(daily_obj); + + Py_DECREF(daily_obj); + + if(dInfoCalc_SetFromAbsDate(dinfo, absdate, + GREGORIAN_CALENDAR)) return -1; + + return 0; +} + +// helper function for date property funcs +static int +DateObject_set_date_info_wtime(DateObject *self, struct date_info *dinfo) { + PyObject *daily_obj = DateObject_toordinal(self); + long absdate = PyInt_AsLong(daily_obj); + double abstime; + + Py_DECREF(daily_obj); + + abstime = getAbsTime(self->freq, absdate, self->value); + + if(dInfoCalc_SetFromAbsDateTime(dinfo, absdate, abstime, + GREGORIAN_CALENDAR)) return -1; + + return 0; +} + +static PyObject * +DateObject_year(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.year); +} + +static int _DateObject_quarter_year(DateObject *self, int *year, int *quarter) { + + PyObject *daily_obj; + long absdate; + + asfreq_info af_info; + int qtr_freq; + + daily_obj = DateObject_toordinal(self); + absdate = PyInt_AsLong(daily_obj); + Py_DECREF(daily_obj); + + if (get_freq_group(self->freq) == FR_QTR) { + qtr_freq = self->freq; + } else { qtr_freq = FR_QTR; } + get_asfreq_info(FR_DAY, qtr_freq, &af_info); + + if(DtoQ_yq(absdate, &af_info, year, quarter) == INT_ERR_CODE) + { return INT_ERR_CODE; } + + if ((qtr_freq % 1000) > 12) { *year -= 1; } + + return 0; +} + +static PyObject * +DateObject_qyear(DateObject *self, void *closure) { + int year, quarter; + if(_DateObject_quarter_year(self, + &year, &quarter) == INT_ERR_CODE) { return NULL; } + return PyInt_FromLong(year); +} + +static PyObject * +DateObject_quarter(DateObject *self, void *closure) { + int year, quarter; + if(_DateObject_quarter_year(self, + &year, &quarter) == INT_ERR_CODE) { return NULL; } + return PyInt_FromLong(quarter); +} + +static PyObject * +DateObject_month(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.month); +} + +static PyObject * +DateObject_day(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.day); +} + +static PyObject * +DateObject_day_of_week(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.day_of_week); +} + +static PyObject * +DateObject_day_of_year(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.day_of_year); +} + +static PyObject * 
+DateObject_week(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dInfoCalc_ISOWeek(&dinfo)); +} + +static PyObject * +DateObject_hour(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.hour); +} + +static PyObject * +DateObject_minute(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; + return PyInt_FromLong(dinfo.minute); +} + +static PyObject * +DateObject_second(DateObject *self, void *closure) { + struct date_info dinfo; + if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; + return PyInt_FromLong((int)dinfo.second); +} + +static PyObject * +DateObject_datetime(DateObject *self, void *closure) { + PyObject *datetime; + struct date_info dinfo; + if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; + datetime = PyDateTime_FromDateAndTime(dinfo.year, dinfo.month, + dinfo.day, dinfo.hour, + dinfo.minute, (int)dinfo.second, 0); + return datetime; +} + +static int +DateObject_ReadOnlyErr(DateObject *self, PyObject *value, void *closure) { + PyErr_SetString(PyExc_AttributeError, "Cannot set read-only property"); + return -1; +} + +static PyGetSetDef DateObject_getseters[] = { + {"year", (getter)DateObject_year, (setter)DateObject_ReadOnlyErr, + "Returns the year.", NULL}, + {"qyear", (getter)DateObject_qyear, (setter)DateObject_ReadOnlyErr, + "For quarterly frequency dates, returns the year corresponding to the\n" + "year end (start) month. When using QTR or QTR-E based quarterly\n" + "frequencies, this is the fiscal year in a financial context.\n\n" + "For non-quarterly dates, this simply returns the year of the date.", + NULL}, + {"quarter", (getter)DateObject_quarter, (setter)DateObject_ReadOnlyErr, + "Returns the quarter.", NULL}, + {"month", (getter)DateObject_month, (setter)DateObject_ReadOnlyErr, + "Returns the month.", NULL}, + {"week", (getter)DateObject_week, (setter)DateObject_ReadOnlyErr, + "Returns the week.", NULL}, + {"day", (getter)DateObject_day, (setter)DateObject_ReadOnlyErr, + "Returns the day of month.", NULL}, + {"day_of_week", (getter)DateObject_day_of_week, (setter)DateObject_ReadOnlyErr, + "Returns the day of week.", NULL}, + {"day_of_year", (getter)DateObject_day_of_year, (setter)DateObject_ReadOnlyErr, + "Returns the day of year.", NULL}, + {"second", (getter)DateObject_second, (setter)DateObject_ReadOnlyErr, + "Returns the second.", NULL}, + {"minute", (getter)DateObject_minute, (setter)DateObject_ReadOnlyErr, + "Returns the minute.", NULL}, + {"hour", (getter)DateObject_hour, (setter)DateObject_ReadOnlyErr, + "Returns the hour.", NULL}, + + {"freqstr", (getter)DateObject_freqstr, (setter)DateObject_ReadOnlyErr, + "Returns the string representation of frequency.", NULL}, + {"datetime", (getter)DateObject_datetime, (setter)DateObject_ReadOnlyErr, + "Returns the Date object converted to standard python datetime object", + NULL}, + + {NULL} /* Sentinel */ +}; + + +static PyNumberMethods DateObject_as_number = { + (binaryfunc)DateObject___add__, /* nb_add */ + (binaryfunc)DateObject___subtract__, /* nb_subtract */ + 0, /* nb_multiply */ + 0, /* nb_divide */ + 0, /* nb_remainder */ + 0, /* nb_divmod */ + 0, /* nb_power */ + 0, /* nb_negative */ + 0, /* nb_positive */ + 0, /* nb_absolute */ + 0, /* nb_nonzero */ + 0, /* nb_invert */ + 0, /* nb_lshift */ + 
0, /* nb_rshift */ + 0, /* nb_and */ + 0, /* nb_xor */ + 0, /* nb_or */ + 0, /* nb_coerce */ + (unaryfunc)DateObject___int__, /* nb_int */ + (unaryfunc)0, /* nb_long */ + (unaryfunc)DateObject___float__, /* nb_float */ + (unaryfunc)0, /* nb_oct */ + (unaryfunc)0, /* nb_hex */ +}; + +static PyMethodDef DateObject_methods[] = { + {"toordinal", (PyCFunction)DateObject_toordinal, METH_NOARGS, + DateObject_toordinal_doc}, + {"strfmt", (PyCFunction)DateObject_strfmt, METH_VARARGS, + DateObject_strfmt_doc}, + {"asfreq", (PyCFunction)DateObject_asfreq, METH_VARARGS | METH_KEYWORDS, + DateObject_asfreq_doc}, + {NULL} /* Sentinel */ +}; + + +static PyTypeObject DateType = { + PyObject_HEAD_INIT(NULL) + 0, /* ob_size */ + "timeseries.Date", /* tp_name */ + sizeof(DateObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + (destructor)DateObject_dealloc, /* tp_dealloc */ + 0, /* tp_print */ + 0, /* tp_getattr */ + 0, /* tp_setattr */ + (cmpfunc)DateObject___compare__, /* tp_compare */ + (reprfunc)DateObject___repr__, /* tp_repr */ + &DateObject_as_number, /* tp_as_number */ + 0, /* tp_as_sequence */ + 0, /* tp_as_mapping */ + (hashfunc)DateObject___hash__, /* tp_hash */ + 0, /* tp_call*/ + (reprfunc)DateObject___str__, /* tp_str */ + 0, /* tp_getattro */ + 0, /* tp_setattro */ + 0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | /* tp_flags */ + Py_TPFLAGS_CHECKTYPES | + Py_TPFLAGS_BASETYPE, + "Defines a Date object, as the combination of a date and a frequency.\n" + "Several options are available to construct a Date object explicitly:\n\n" + "- Give appropriate values to the `year`, `month`, `day`, `quarter`, `hours`,\n" + " `minutes`, `seconds` arguments.\n\n" + " >>> td.Date(freq='Q',year=2004,quarter=3)\n" + " >>> td.Date(freq='D',year=2001,month=1,day=1)\n\n" + "- Use the `string` keyword. This method uses a modified version of the\n" + " mx.DateTime parser submodule. 
More information is available in its\n" + " documentation.\n\n" + " >>> ts.Date('D', '2007-01-01')\n\n" + "- Use the `datetime` keyword with an existing datetime.datetime object.\n\n" + " >>> td.Date('D', datetime=datetime.datetime.now())", /* tp_doc */ + 0, /* tp_traverse */ + 0, /* tp_clear */ + 0, /* tp_richcompare */ + 0, /* tp_weaklistoffset */ + 0, /* tp_iter */ + 0, /* tp_iternext */ + DateObject_methods, /* tp_methods */ + DateObject_members, /* tp_members */ + DateObject_getseters, /* tp_getset */ + 0, /* tp_base */ + 0, /* tp_dict */ + 0, /* tp_descr_get */ + 0, /* tp_descr_set */ + 0, /* tp_dictoffset */ + (initproc)DateObject_init, /* tp_init */ + 0, /* tp_alloc */ + DateObject_new, /* tp_new */ +}; + + +/////////////////////////////////////////////////////////////////////// + +char c_tdates_check_freq_doc[] = +"translate user specified frequency into frequency constant"; +PyObject * +c_tdates_check_freq(PyObject *self, PyObject *args) { + + PyObject *freq; + int freq_val; + + if (!PyArg_ParseTuple(args, "O:check_freq(freq)", &freq)) return NULL; + if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; + + return PyInt_FromLong(freq_val); +} + +char c_tdates_check_freq_str_doc[] = +"translate user specified frequency into standard string representation"; +PyObject * +c_tdates_check_freq_str(PyObject *self, PyObject *args) { + + PyObject *alias_tuple, *result, *freq_key; + + if ((freq_key = c_tdates_check_freq(self, args)) == NULL) return NULL; + + alias_tuple = PyDict_GetItem(freq_dict, freq_key); + result = PyTuple_GET_ITEM(alias_tuple, 0); + + Py_INCREF(result); + + Py_DECREF(freq_key); + + return result; +} + +char c_tdates_get_freq_group_doc[] = +"translate user specified frequency into frequency group constant"; +PyObject * +c_tdates_get_freq_group(PyObject *self, PyObject *args) { + + PyObject *freq; + int freq_val; + + if (!PyArg_ParseTuple(args, "O:get_freq_group(freq)", &freq)) return NULL; + if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; + + return PyInt_FromLong(get_freq_group(freq_val)); +} + +char c_tdates_thisday_doc[] = +"Returns today's date, at the given frequency\n\n" +":Parameters:\n" +" - freq : string/int\n" +" Frequency to convert the Date to. 
Accepts any valid frequency\n" +" specification (string or integer)\n"; +PyObject * +c_tdates_thisday(PyObject *self, PyObject *args) { + + PyObject *freq, *init_args, *init_kwargs; + time_t rawtime; + struct tm *timeinfo; + int freq_val; + + DateObject *secondly_date; + + if (!PyArg_ParseTuple(args, "O:thisday(freq)", &freq)) return NULL; + + if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; + + time(&rawtime); + timeinfo = localtime(&rawtime); + + init_args = PyTuple_New(0); + init_kwargs = PyDict_New(); + + DICT_SETINT_STRKEY(init_kwargs, "freq", FR_SEC); + DICT_SETINT_STRKEY(init_kwargs, "year", timeinfo->tm_year+1900); + DICT_SETINT_STRKEY(init_kwargs, "month", timeinfo->tm_mon+1); + DICT_SETINT_STRKEY(init_kwargs, "day", timeinfo->tm_mday); + DICT_SETINT_STRKEY(init_kwargs, "hour", timeinfo->tm_hour); + DICT_SETINT_STRKEY(init_kwargs, "minute", timeinfo->tm_min); + DICT_SETINT_STRKEY(init_kwargs, "second", timeinfo->tm_sec); + + secondly_date = DateObject_New(); + DateObject_init(secondly_date, init_args, init_kwargs); + + Py_DECREF(init_args); + Py_DECREF(init_kwargs); + + if (freq_val != FR_SEC) { + DateObject *result = DateObject_New(); + + long (*asfreq_func)(long, char, asfreq_info*) = NULL; + asfreq_info af_info; + + int date_val; + + get_asfreq_info(FR_SEC, freq_val, &af_info); + asfreq_func = get_asfreq_func(FR_SEC, freq_val, 0); + + date_val = asfreq_func(secondly_date->value, 'A', &af_info); + + Py_DECREF(secondly_date); + + result->freq = freq_val; + result->value = date_val; + + return (PyObject*)result; + + } else { return (PyObject*)secondly_date; } +} + + +PyObject * +DateArray_asfreq(PyObject *self, PyObject *args) +{ + PyArrayObject *fromDates, *toDates; + PyArrayIterObject *iterFrom, *iterTo; + PyObject *fromDateObj, *toDateObj; + char *relation; + int fromFreq, toFreq; + long fromDate, toDate; + long (*asfreq_main)(long, char, asfreq_info*) = NULL; + asfreq_info af_info; + + if (!PyArg_ParseTuple(args, + "Oiis:asfreq(fromDates, fromfreq, tofreq, relation)", + &fromDates, &fromFreq, &toFreq, &relation)) return NULL; + + get_asfreq_info(fromFreq, toFreq, &af_info); + + asfreq_main = get_asfreq_func(fromFreq, toFreq, 0); + + toDates = (PyArrayObject *)PyArray_Copy(fromDates); + + iterFrom = (PyArrayIterObject *)PyArray_IterNew((PyObject *)fromDates); + if (iterFrom == NULL) return NULL; + + iterTo = (PyArrayIterObject *)PyArray_IterNew((PyObject *)toDates); + if (iterTo == NULL) return NULL; + + while (iterFrom->index < iterFrom->size) { + + fromDateObj = PyArray_GETITEM(fromDates, iterFrom->dataptr); + fromDate = PyInt_AsLong(fromDateObj); + CHECK_ASFREQ(toDate = asfreq_main(fromDate, relation[0], &af_info)); + toDateObj = PyInt_FromLong(toDate); + + PyArray_SETITEM(toDates, iterTo->dataptr, toDateObj); + + Py_DECREF(fromDateObj); + Py_DECREF(toDateObj); + + PyArray_ITER_NEXT(iterFrom); + PyArray_ITER_NEXT(iterTo); + } + + Py_DECREF(iterFrom); + Py_DECREF(iterTo); + + return (PyObject *)toDates; + +} + +PyObject * +DateArray_getDateInfo(PyObject *self, PyObject *args) +{ + int freq; + char *info; + + PyArrayObject *array; + PyArrayObject *newArray; + PyArrayIterObject *iterSource, *iterResult; + + PyObject* (*getDateInfo)(DateObject*, void*) = NULL; + + if (!PyArg_ParseTuple(args, "Ois:getDateInfo(array, freq, info)", &array, &freq, &info)) return NULL; + newArray = (PyArrayObject *)PyArray_Copy(array); + + iterSource = (PyArrayIterObject *)PyArray_IterNew((PyObject *)array); + iterResult = (PyArrayIterObject *)PyArray_IterNew((PyObject *)newArray); + + 
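The switch that follows maps a single "info" character onto one of the Date property getters defined earlier and applies it element-wise. A hedged pure-Python equivalent of that dispatch (illustrative only; it assumes numpy and the Date type defined above are importable):

    import numpy as np

    GETTERS = {'Y': 'year', 'F': 'qyear', 'Q': 'quarter', 'M': 'month',
               'D': 'day', 'R': 'day_of_year', 'W': 'day_of_week',
               'I': 'week', 'H': 'hour', 'T': 'minute', 'S': 'second'}

    def get_date_info_sketch(values, freq, info):
        # build a Date from each integer value and read one property off it,
        # mirroring the GETITEM/SETITEM loop of the C version
        prop = GETTERS[info]
        return np.array([getattr(Date(freq, value=int(v)), prop) for v in values])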
+ switch(*info) + { + case 'Y': //year + getDateInfo = &DateObject_year; + break; + case 'F': //"fiscal" year + getDateInfo = &DateObject_qyear; + break; + case 'Q': //quarter + getDateInfo = &DateObject_quarter; + break; + case 'M': //month + getDateInfo = &DateObject_month; + break; + case 'D': //day + getDateInfo = &DateObject_day; + break; + case 'R': //day of year + getDateInfo = &DateObject_day_of_year; + break; + case 'W': //day of week + getDateInfo = &DateObject_day_of_week; + break; + case 'I': //week of year + getDateInfo = &DateObject_week; + break; + case 'H': //hour + getDateInfo = &DateObject_hour; + break; + case 'T': //minute + getDateInfo = &DateObject_minute; + break; + case 'S': //second + getDateInfo = &DateObject_second; + break; + default: + return NULL; + } + + while (iterSource->index < iterSource->size) { + DateObject *curr_date; + PyObject *val, *dInfo; + + val = PyArray_GETITEM(array, iterSource->dataptr); + curr_date = DateObject_FromFreqAndValue(freq, PyInt_AsLong(val)); + dInfo = getDateInfo(curr_date, NULL); + + PyArray_SETITEM(newArray, iterResult->dataptr, dInfo); + + Py_DECREF(val); + Py_DECREF(curr_date); + Py_DECREF(dInfo); + + PyArray_ITER_NEXT(iterSource); + PyArray_ITER_NEXT(iterResult); + } + + Py_DECREF(iterSource); + Py_DECREF(iterResult); + + return (PyObject *) newArray; +} + + +void import_c_tdates(PyObject *m) +{ + + if (PyType_Ready(&DateType) < 0) return; + + DateCalc_Error = + PyErr_NewException("c_tdates.DateCalc_Error", NULL, NULL); + DateCalc_RangeError = + PyErr_NewException("c_tdates.DateCalc_RangeError", NULL, NULL); + + import_array(); + PyDateTime_IMPORT; + + Py_INCREF(&DateType); + PyModule_AddObject(m, "Date", (PyObject *)(&DateType)); + + if(build_freq_dict() == INT_ERR_CODE) { + PyErr_SetString( \ + PyExc_ImportError, \ + "initialization of module timeseries.c_tdates failed"); + return; + }; + + PyModule_AddObject(m, "freq_dict", freq_dict); + PyModule_AddObject(m, "freq_dict_rev", freq_dict_rev); + PyModule_AddObject(m, "freq_constants", freq_constants); + + PyModule_AddObject(m, "DateCalc_Error", DateCalc_Error); + PyModule_AddObject(m, "DateCalc_RangeError", DateCalc_RangeError); + +} Added: trunk/Lib/sandbox/timeseries/src/c_tseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-09 17:08:29 UTC (rev 2974) +++ trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-09 17:09:36 UTC (rev 2975) @@ -0,0 +1,835 @@ +#include "c_tdates.h" +#include "c_tseries.h" + +/* Helper function for TimeSeries_convert: + determine the size of the second dimension for the resulting + converted array */ +static long get_height(int fromFreq, int toFreq) { + + int maxBusDaysPerYear, maxBusDaysPerQuarter, maxBusDaysPerMonth; + int maxDaysPerYear, maxDaysPerQuarter, maxDaysPerMonth; + + int fromGroup = get_freq_group(fromFreq); + int toGroup = get_freq_group(toFreq); + + if (fromGroup == FR_UND) { fromGroup = FR_DAY; } + + maxBusDaysPerYear = 262; + maxBusDaysPerQuarter = 66; + maxBusDaysPerMonth = 23; + + maxDaysPerYear = 366; + maxDaysPerQuarter = 92; + maxDaysPerMonth = 31; + + switch(fromGroup) + { + case FR_ANN: return 1; + case FR_QTR: + switch(toGroup) + { + case FR_ANN: return 4; + default: return 1; + } + case FR_MTH: //monthly + switch(toGroup) + { + case FR_ANN: return 12; + case FR_QTR: return 3; + default: return 1; + } + case FR_WK: //weekly + switch(toGroup) + { + case FR_ANN: return 53; + case FR_QTR: return 13; + case FR_MTH: return 4; + default: return 1; + 
} + case FR_BUS: //business + switch(toGroup) + { + case FR_ANN: return maxBusDaysPerYear;; + case FR_QTR: return maxBusDaysPerQuarter; + case FR_MTH: return maxBusDaysPerMonth; + case FR_WK: return 5; + default: return 1; + } + case FR_DAY: //daily + switch(toGroup) + { + case FR_ANN: return maxDaysPerYear;; + case FR_QTR: return maxDaysPerQuarter; + case FR_MTH: return maxDaysPerMonth; + case FR_WK: return 7; + default: return 1; + } + case FR_HR: //hourly + switch(toGroup) + { + case FR_ANN: return 24 * maxDaysPerYear;; + case FR_QTR: return 24 * maxDaysPerQuarter; + case FR_MTH: return 24 * maxDaysPerMonth; + case FR_WK: return 24 * 7; + case FR_DAY: return 24; + case FR_BUS: return 24; + default: return 1; + } + case FR_MIN: //minutely + switch(toGroup) + { + case FR_ANN: return 24 * 60 * maxDaysPerYear;; + case FR_QTR: return 24 * 60 * maxDaysPerQuarter; + case FR_MTH: return 24 * 60 * maxDaysPerMonth; + case FR_WK: return 24 * 60 * 7; + case FR_DAY: return 24 * 60; + case FR_BUS: return 24 * 60; + case FR_HR: return 60; + default: return 1; + } + case FR_SEC: //minutely + switch(toGroup) + { + case FR_ANN: return 24 * 60 * 60 * maxDaysPerYear;; + case FR_QTR: return 24 * 60 * 60 * maxDaysPerQuarter; + case FR_MTH: return 24 * 60 * 60 * maxDaysPerMonth; + case FR_WK: return 24 * 60 * 60 * 7; + case FR_DAY: return 24 * 60 * 60; + case FR_BUS: return 24 * 60 * 60; + case FR_HR: return 60 * 60; + case FR_MIN: return 60; + default: return 1; + } + default: return 1; + } +} + +PyObject * +TimeSeries_convert(PyObject *self, PyObject *args) +{ + PyObject *arrayTest; + PyArrayObject *array, *newArray; + PyArrayObject *mask, *newMask; + + PyObject *returnVal = NULL; + PyObject *start_index_retval; + + long startIndex; + long newStart, newStartTemp; + long newEnd, newEndTemp; + long newLen, newHeight; + int i; + long currIndex, prevIndex; + long nd; + npy_intp *dim, *newIdx; + long currPerLen; + char *position; + PyObject *fromFreq_arg, *toFreq_arg; + int fromFreq, toFreq; + char relation; + asfreq_info af_info; + + PyObject *val, *valMask; + + long (*asfreq_main)(long, char, asfreq_info*) = NULL; + long (*asfreq_endpoints)(long, char, asfreq_info*) = NULL; + long (*asfreq_reverse)(long, char, asfreq_info*) = NULL; + + returnVal = PyDict_New(); + + if (!PyArg_ParseTuple(args, + "OOOslO:convert(array, fromfreq, tofreq, position, startIndex, mask)", + &array, &fromFreq_arg, &toFreq_arg, + &position, &startIndex, &mask)) return NULL; + + if((fromFreq = check_freq(fromFreq_arg)) == INT_ERR_CODE) return NULL; + if((toFreq = check_freq(toFreq_arg)) == INT_ERR_CODE) return NULL; + + if (toFreq == fromFreq) + { + PyObject *sidx; + newArray = (PyArrayObject *)PyArray_Copy(array); + newMask = (PyArrayObject *)PyArray_Copy(mask); + sidx = PyInt_FromLong(startIndex); + + PyDict_SetItemString(returnVal, "values", (PyObject*)newArray); + PyDict_SetItemString(returnVal, "mask", (PyObject*)newMask); + PyDict_SetItemString(returnVal, "startindex", sidx); + + Py_DECREF(newArray); + Py_DECREF(newMask); + Py_DECREF(sidx); + + return returnVal; + } + + switch(position[0]) + { + case 'S': + // start -> before + relation = 'B'; + break; + case 'E': + // end -> after + relation = 'A'; + break; + default: + return NULL; + break; + } + + get_asfreq_info(fromFreq, toFreq, &af_info); + + asfreq_main = get_asfreq_func(fromFreq, toFreq, 1); + asfreq_endpoints = get_asfreq_func(fromFreq, toFreq, 0); + + //convert start index to new frequency + CHECK_ASFREQ(newStartTemp = asfreq_main(startIndex, 'B', &af_info)); + if 
(newStartTemp < 1) { + CHECK_ASFREQ(newStart = asfreq_endpoints(startIndex, 'A', &af_info)); + } + else { newStart = newStartTemp; } + + //convert end index to new frequency + CHECK_ASFREQ(newEndTemp = asfreq_main(startIndex+array->dimensions[0]-1, 'A', &af_info)); + if (newEndTemp < 1) { + CHECK_ASFREQ(newEnd = asfreq_endpoints(startIndex+array->dimensions[0]-1, 'B', &af_info)); + } + else { newEnd = newEndTemp; } + + if (newStart < 1) { + PyErr_SetString(PyExc_ValueError, "start_date outside allowable range for destination frequency"); + return NULL; + } + + newLen = newEnd - newStart + 1; + newHeight = get_height(fromFreq, toFreq); + + if (newHeight > 1) { + long tempval; + asfreq_info af_info_rev; + + get_asfreq_info(toFreq, fromFreq, &af_info_rev); + asfreq_reverse = get_asfreq_func(toFreq, fromFreq, 0); + + CHECK_ASFREQ(tempval = asfreq_reverse(newStart, 'B', &af_info_rev)); + currPerLen = startIndex - tempval; + + nd = 2; + dim = PyDimMem_NEW(nd); + dim[0] = (npy_intp)newLen; + dim[1] = (npy_intp)newHeight; + } else { + nd = 1; + dim = PyDimMem_NEW(nd); + dim[0] = (npy_intp)newLen; + } + + newIdx = PyDimMem_NEW(nd); + arrayTest = PyArray_SimpleNew(nd, dim, array->descr->type_num); + if (arrayTest == NULL) { return NULL; } + newArray = (PyArrayObject*)arrayTest; + newMask = (PyArrayObject*)PyArray_SimpleNew(nd, dim, mask->descr->type_num); + + PyDimMem_FREE(dim); + + PyArray_FILLWBYTE(newArray,0); + PyArray_FILLWBYTE(newMask,1); + + prevIndex = newStart; + + //set values in the new array + for (i = 0; i < array->dimensions[0]; i++) { + + val = PyArray_GETITEM(array, PyArray_GetPtr(array, &i)); + valMask = PyArray_GETITEM(mask, PyArray_GetPtr(mask, &i)); + + CHECK_ASFREQ(currIndex = asfreq_main(startIndex + i, relation, &af_info)); + + newIdx[0] = currIndex-newStart; + + if (newHeight > 1) { + + if (currIndex != prevIndex) + { + //reset period length + currPerLen = 0; + prevIndex = currIndex; + } + + newIdx[1] = currPerLen; + currPerLen++; + } + + if (newIdx[0] > -1) { + PyArray_SETITEM(newArray, PyArray_GetPtr(newArray, newIdx), val); + PyArray_SETITEM(newMask, PyArray_GetPtr(newMask, newIdx), valMask); + } + + Py_DECREF(val); + Py_DECREF(valMask); + + } + + PyDimMem_FREE(newIdx); + + start_index_retval = (PyObject*)PyInt_FromLong(newStart); + + PyDict_SetItemString(returnVal, "values", (PyObject*)newArray); + PyDict_SetItemString(returnVal, "mask", (PyObject*)newMask); + PyDict_SetItemString(returnVal, "startindex", start_index_retval); + + Py_DECREF(newArray); + Py_DECREF(newMask); + Py_DECREF(start_index_retval); + + return returnVal; +} + + +/* This function is directly copied from direct copy of function in */ +/* Return typenumber from dtype2 unless it is NULL, then return + NPY_DOUBLE if dtype1->type_num is integer or bool + and dtype1->type_num otherwise. +*/ +static int +_get_type_num_double(PyArray_Descr *dtype1, PyArray_Descr *dtype2) +{ + if (dtype2 != NULL) + return dtype2->type_num; + + /* For integer or bool data-types */ + if (dtype1->type_num < NPY_FLOAT) { + return NPY_DOUBLE; + } + else { + return dtype1->type_num; + } +} + +#define _CHKTYPENUM(typ) ((typ) ? 
(typ)->type_num : PyArray_NOTYPE) + +/* validates the standard arguments to moving functions and set the original + mask, original ndarray, and mask for the result */ +static PyObject * +check_mov_args(PyObject *orig_arrayobj, int span, int min_win_size, + PyObject **orig_ndarray, PyObject **result_mask) { + + PyObject *orig_mask=NULL; + PyArrayObject **orig_ndarray_tmp, **result_mask_tmp; + int *raw_result_mask; + + if (!PyArray_Check(orig_arrayobj)) { + PyErr_SetString(PyExc_ValueError, "array must be a valid subtype of ndarray"); + return NULL; + } + + // check if array has a mask, and if that mask is an array + if (PyObject_HasAttrString(orig_arrayobj, "_mask")) { + PyObject *tempMask = PyObject_GetAttrString(orig_arrayobj, "_mask"); + if (PyArray_Check(tempMask)) { + orig_mask = PyArray_EnsureArray(tempMask); + } else { + Py_DECREF(tempMask); + } + } + + *orig_ndarray = PyArray_EnsureArray(orig_arrayobj); + orig_ndarray_tmp = (PyArrayObject**)orig_ndarray; + + if ((*orig_ndarray_tmp)->nd != 1) { + PyErr_SetString(PyExc_ValueError, "array must be 1 dimensional"); + return NULL; + } + + if (span < min_win_size) { + char *error_str; + error_str = malloc(60 * sizeof(char)); + MEM_CHECK(error_str) + sprintf(error_str, + "span must be greater than or equal to %i", + min_win_size); + PyErr_SetString(PyExc_ValueError, error_str); + free(error_str); + return NULL; + } + + raw_result_mask = malloc((*orig_ndarray_tmp)->dimensions[0] * sizeof(int)); + MEM_CHECK(raw_result_mask) + + { + PyArrayObject *orig_mask_tmp; + int i, valid_points=0, is_masked; + + orig_mask_tmp = (PyArrayObject*)orig_mask; + + for (i=0; i<((*orig_ndarray_tmp)->dimensions[0]); i++) { + + is_masked=0; + + if (orig_mask != NULL) { + PyObject *valMask; + valMask = PyArray_GETITEM(orig_mask_tmp, + PyArray_GetPtr(orig_mask_tmp, &i)); + is_masked = (int)PyInt_AsLong(valMask); + Py_DECREF(valMask); + } + + if (is_masked) { + valid_points=0; + } else { + if (valid_points < span) { valid_points += 1; } + if (valid_points < span) { is_masked = 1; } + } + + raw_result_mask[i] = is_masked; + } + } + + *result_mask = PyArray_SimpleNewFromData( + 1, (*orig_ndarray_tmp)->dimensions, + PyArray_INT32, raw_result_mask); + MEM_CHECK(*result_mask) + result_mask_tmp = (PyArrayObject**)result_mask; + (*result_mask_tmp)->flags = ((*result_mask_tmp)->flags) | NPY_OWNDATA; + return 0; +} + +/* computation portion of moving sum. 
Appropriate mask is overlayed on top + afterwards */ +static PyObject* +calc_mov_sum(PyArrayObject *orig_ndarray, int span, int rtype) +{ + PyArrayObject *result_ndarray=NULL; + int i; + + result_ndarray = (PyArrayObject*)PyArray_ZEROS( + orig_ndarray->nd, + orig_ndarray->dimensions, + rtype, 0); + ERR_CHECK(result_ndarray) + + for (i=0; idimensions[0]; i++) { + + PyObject *val=NULL, *mov_sum_val=NULL; + + val = PyArray_GETITEM(orig_ndarray, PyArray_GetPtr(orig_ndarray, &i)); + + if (i == 0) { + mov_sum_val = val; + } else { + int prev_idx = i-1; + PyObject *mov_sum_prevval; + mov_sum_prevval= PyArray_GETITEM(result_ndarray, + PyArray_GetPtr(result_ndarray, &prev_idx)); + mov_sum_val = np_add(val, mov_sum_prevval); + Py_DECREF(mov_sum_prevval); + ERR_CHECK(mov_sum_val) + + if (i >= span) { + PyObject *temp_val, *rem_val; + int rem_idx = i-span; + temp_val = mov_sum_val; + rem_val = PyArray_GETITEM(orig_ndarray, + PyArray_GetPtr(orig_ndarray, &rem_idx)); + + mov_sum_val = np_subtract(temp_val, rem_val); + ERR_CHECK(mov_sum_val) + + Py_DECREF(temp_val); + Py_DECREF(rem_val); + } + } + + PyArray_SETITEM(result_ndarray, + PyArray_GetPtr(result_ndarray, &i), + mov_sum_val); + + if (mov_sum_val != val) { Py_DECREF(val); } + + Py_DECREF(mov_sum_val); + } + + return (PyObject*)result_ndarray; + +} + +PyObject * +MaskedArray_mov_sum(PyObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *orig_arrayobj=NULL, *orig_ndarray=NULL, + *result_ndarray=NULL, *result_mask=NULL, + *result_dict=NULL; + PyArray_Descr *dtype=NULL; + + int rtype, span; + + static char *kwlist[] = {"array", "span", "dtype", NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, + "Oi|O&:mov_sum(array, span, dtype)", kwlist, + &orig_arrayobj, &span, + PyArray_DescrConverter2, &dtype)) return NULL; + + check_mov_args(orig_arrayobj, span, 1, + &orig_ndarray, &result_mask); + + rtype = _CHKTYPENUM(dtype); + + result_ndarray = calc_mov_sum((PyArrayObject*)orig_ndarray, + span, rtype); + ERR_CHECK(result_ndarray) + + result_dict = PyDict_New(); + MEM_CHECK(result_dict) + PyDict_SetItemString(result_dict, "array", result_ndarray); + PyDict_SetItemString(result_dict, "mask", result_mask); + + Py_DECREF(result_ndarray); + Py_DECREF(result_mask); + return result_dict; +} + +PyObject * +MaskedArray_mov_average(PyObject *self, PyObject *args, PyObject *kwds) +{ + PyObject *orig_arrayobj=NULL, *orig_ndarray=NULL, + *result_ndarray=NULL, *result_mask=NULL, + *result_dict=NULL, + *mov_sum=NULL, *denom=NULL; + PyArray_Descr *dtype=NULL; + + int rtype, span; + + static char *kwlist[] = {"array", "span", "dtype", NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, kwds, + "Oi|O&:mov_average(array, span, dtype)", kwlist, + &orig_arrayobj, &span, + PyArray_DescrConverter2, &dtype)) return NULL; + + + check_mov_args(orig_arrayobj, span, 2, + &orig_ndarray, &result_mask); + + rtype = _get_type_num_double(((PyArrayObject*)orig_ndarray)->descr, dtype); + + mov_sum = calc_mov_sum((PyArrayObject*)orig_ndarray, span, rtype); + ERR_CHECK(mov_sum) + + denom = PyFloat_FromDouble(1.0/(double)(span)); + + result_ndarray = np_multiply(mov_sum, denom); + ERR_CHECK(result_ndarray) + + Py_DECREF(mov_sum); + Py_DECREF(denom); + + result_dict = PyDict_New(); + MEM_CHECK(result_dict) + PyDict_SetItemString(result_dict, "array", result_ndarray); + PyDict_SetItemString(result_dict, "mask", result_mask); + + Py_DECREF(result_ndarray); + Py_DECREF(result_mask); + return result_dict; +} + + +/* computation portion of moving median. 
Appropriate mask is overlayed on top + afterwards. + + The algorithm used here is based on the code found at: + http://cran.r-project.org/src/contrib/Devel/runStat_1.1.tar.gz + + This code was originally released under the GPL, but the author + (David Brahm) has granted me (and scipy) permission to use it under the BSD + license. */ +PyObject* +calc_mov_median(PyArrayObject *orig_ndarray, int span, int rtype) +{ + PyArrayObject *result_ndarray=NULL; + PyObject **result_array, **ref_array, **even_array=NULL; + PyObject *new_val, *old_val; + PyObject *temp_add, *one_half; + int a, i, k, R, arr_size, z; + int *r; + + arr_size = orig_ndarray->dimensions[0]; + + result_ndarray = (PyArrayObject*)PyArray_ZEROS( + orig_ndarray->nd, + orig_ndarray->dimensions, + rtype, 0); + ERR_CHECK(result_ndarray) + + if (arr_size >= span) { + result_array = calloc(arr_size, sizeof(PyObject*)); + MEM_CHECK(result_array) + + /* this array will be used for quick access to the data in the original + array (so PyArray_GETITEM doesn't have to be used over and over in the + main loop) */ + ref_array = malloc(arr_size * sizeof(PyObject*)); + MEM_CHECK(ref_array) + + for (i=0; i= span-1; i--) { + a = span; + z = i - span + 1; + old_val = ref_array[i+1]; + new_val = ref_array[i-span+1]; + + for (k=span-1; k > 0; k--) { + r[k] = r[k-1]; /* Shift previous iteration's ranks */ + if (np_greater_equal(ref_array[z+k], new_val)) {r[k]++; a--;} + if (np_greater(ref_array[z+k], old_val)) {r[k]--;} + + if (r[k]==R) { + result_array[i] = ref_array[z+k]; + } + + if (even_array != NULL) { + if (r[k]==R) { + even_array[0] = ref_array[z+k]; + } else if (r[k] == (R+1)) { + even_array[1] = ref_array[z+k]; + } + } else { + if (r[k]==R) { + result_array[i] = ref_array[z+k]; + } + } + + } + + r[0] = a; + + if (even_array != NULL) { + if (a==R) { + even_array[0] = new_val; + } else if (a == (R+1)) { + even_array[1] = new_val; + } + + temp_add = np_add(even_array[0], even_array[1]); + result_array[i] = np_multiply(temp_add, one_half);; + Py_DECREF(temp_add); + + } else { + if (a==R) { + result_array[i] = new_val; + } + } + + } + + Py_DECREF(one_half); + + for (i=span-1; idescr, dtype); + + mov_sum = calc_mov_sum((PyArrayObject*)orig_ndarray, span, rtype); + ERR_CHECK(mov_sum) + + result_temp1 = np_multiply(orig_ndarray, orig_ndarray); + ERR_CHECK(result_temp1) + + mov_sum_sq = calc_mov_sum((PyArrayObject*)result_temp1, span, rtype); + Py_DECREF(result_temp1); + ERR_CHECK(mov_sum_sq) + + + /* + formulas from: + http://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods + */ + if (bias == 0) { + denom1 = PyFloat_FromDouble(1.0/(double)(span-1)); + denom2 = PyFloat_FromDouble(1.0/(double)(span*(span-1))); + } else { + denom1 = PyFloat_FromDouble(1.0/(double)span); + denom2 = PyFloat_FromDouble(1.0/(double)(span*span)); + } + + result_temp1 = np_multiply(mov_sum_sq, denom1); + ERR_CHECK(result_temp1) + Py_DECREF(mov_sum_sq); + Py_DECREF(denom1); + + result_temp3 = np_multiply(mov_sum, mov_sum); + ERR_CHECK(result_temp3) + Py_DECREF(mov_sum); + + result_temp2 = np_multiply(result_temp3, denom2); + ERR_CHECK(result_temp2) + Py_DECREF(result_temp3); + Py_DECREF(denom2); + + result_temp3 = np_subtract(result_temp1, result_temp2); + ERR_CHECK(result_temp3) + Py_DECREF(result_temp1); + Py_DECREF(result_temp2); + + if (is_variance) { + result_ndarray = result_temp3; + } else { + result_temp1 = np_sqrt(result_temp3); + ERR_CHECK(result_temp1) + Py_DECREF(result_temp3); + result_ndarray = result_temp1; + } + + result_dict = PyDict_New(); + 
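Two pieces of the computation above are easy to state on their own: calc_mov_sum keeps the window sum incrementally (add the value entering the window, subtract the one that left once i >= span), and the moving variance/standard deviation combines a moving sum with a moving sum of squares through the rapid-calculation identity referenced in the comment. A hedged stand-alone sketch in plain Python (not the module API; the first span-1 entries are only partial sums and are masked out by check_mov_args in the real code):

    def mov_sum_sketch(x, span):
        # incremental window sum: out[i] = out[i-1] + x[i] - x[i-span]
        out, s = [], 0.0
        for i, v in enumerate(x):
            s += v
            if i >= span:
                s -= x[i - span]
            out.append(s)
        return out

    def mov_var_sketch(x, span, bias=0):
        # unbiased: var = sum(x^2)/(n-1) - sum(x)^2 / (n*(n-1))
        # biased:   var = sum(x^2)/n     - sum(x)^2 / n^2
        sums = mov_sum_sketch(x, span)
        sumsqs = mov_sum_sketch([v * v for v in x], span)
        if bias == 0:
            return [sq / (span - 1) - s * s / (span * (span - 1))
                    for s, sq in zip(sums, sumsqs)]
        return [sq / span - s * s / (span * span) for s, sq in zip(sums, sumsqs)]

    mov_sum_sketch([1.0, 2.0, 3.0, 4.0], span=2)   # -> [1.0, 3.0, 5.0, 7.0]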
MEM_CHECK(result_dict) + PyDict_SetItemString(result_dict, "array", result_ndarray); + PyDict_SetItemString(result_dict, "mask", result_mask); + + Py_DECREF(result_ndarray); + Py_DECREF(result_mask); + return result_dict; +} + +void import_c_tseries(PyObject *m) { import_array(); } From scipy-svn at scipy.org Wed May 9 13:10:05 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 12:10:05 -0500 (CDT) Subject: [Scipy-svn] r2976 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070509171005.C211639C0BB@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 12:10:00 -0500 (Wed, 09 May 2007) New Revision: 2976 Modified: trunk/Lib/sandbox/timeseries/src/cseries.c Log: major overhaul of C code Modified: trunk/Lib/sandbox/timeseries/src/cseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-09 17:09:36 UTC (rev 2975) +++ trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-09 17:10:00 UTC (rev 2976) @@ -1,3686 +1,40 @@ -#include -#include -#include -#include -#include "arrayobject.h" +#include "c_lib.h" +#include "c_tdates.h" +#include "c_tseries.h" -static char cseries_doc[] = "Speed sensitive time series operations"; -#define FR_ANN 1000 /* Annual */ -#define FR_ANNDEC FR_ANN /* Annual - December year end*/ -#define FR_ANNJAN 1001 /* Annual - January year end*/ -#define FR_ANNFEB 1002 /* Annual - February year end*/ -#define FR_ANNMAR 1003 /* Annual - March year end*/ -#define FR_ANNAPR 1004 /* Annual - April year end*/ -#define FR_ANNMAY 1005 /* Annual - May year end*/ -#define FR_ANNJUN 1006 /* Annual - June year end*/ -#define FR_ANNJUL 1007 /* Annual - July year end*/ -#define FR_ANNAUG 1008 /* Annual - August year end*/ -#define FR_ANNSEP 1009 /* Annual - September year end*/ -#define FR_ANNOCT 1010 /* Annual - October year end*/ -#define FR_ANNNOV 1011 /* Annual - November year end*/ - -/* The standard quarterly frequencies. Year is determined by what year the end - month lies in. */ -#define FR_QTR 2000 /* Quarterly - December year end (default quarterly) */ -#define FR_QTRDEC FR_QTR /* Quarterly - December year end */ -#define FR_QTRJAN 2001 /* Quarterly - January year end */ -#define FR_QTRFEB 2002 /* Quarterly - February year end */ -#define FR_QTRMAR 2003 /* Quarterly - March year end */ -#define FR_QTRAPR 2004 /* Quarterly - April year end */ -#define FR_QTRMAY 2005 /* Quarterly - May year end */ -#define FR_QTRJUN 2006 /* Quarterly - June year end */ -#define FR_QTRJUL 2007 /* Quarterly - July year end */ -#define FR_QTRAUG 2008 /* Quarterly - August year end */ -#define FR_QTRSEP 2009 /* Quarterly - September year end */ -#define FR_QTROCT 2010 /* Quarterly - October year end */ -#define FR_QTRNOV 2011 /* Quarterly - November year end */ - -/* End period based quarterly frequencies. Year is determined by what year the - end month lies in. 
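The constant block above encodes each frequency's family in the thousands (annual in the 1000s, quarterly in the 2000s, monthly 3000, with the weekly, business, daily and intraday codes following below), while the remainder selects the variant such as the year-end month. The get_freq_group helper defined a little further down recovers the family with plain integer arithmetic; restated in Python for illustration:

    def get_freq_group(freq):
        # keep only the thousands, as the C helper does for these codes
        return (freq // 1000) * 1000

    get_freq_group(1005)   # -> 1000  (FR_ANNMAY belongs to the annual group)
    get_freq_group(2011)   # -> 2000  (FR_QTRNOV belongs to the quarterly group)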
*/ -#define FR_QTREDEC FR_QTRDEC /* Quarterly - December year end*/ -#define FR_QTREJAN FR_QTRJAN /* Quarterly - January year end*/ -#define FR_QTREFEB FR_QTRFEB /* Quarterly - February year end*/ -#define FR_QTREMAR FR_QTRMAR /* Quarterly - March year end*/ -#define FR_QTREAPR FR_QTRAPR /* Quarterly - April year end*/ -#define FR_QTREMAY FR_QTRMAY /* Quarterly - May year end*/ -#define FR_QTREJUN FR_QTRJUN /* Quarterly - June year end*/ -#define FR_QTREJUL FR_QTRJUL /* Quarterly - July year end*/ -#define FR_QTREAUG FR_QTRAUG /* Quarterly - August year end*/ -#define FR_QTRESEP FR_QTRSEP /* Quarterly - September year end*/ -#define FR_QTREOCT FR_QTROCT /* Quarterly - October year end*/ -#define FR_QTRENOV FR_QTRNOV /* Quarterly - November year end*/ - -/* Starting period based quarterly frequencies. Year is determined by what year - the starting month lies in. */ -#define FR_QTRSDEC FR_QTRDEC+12 /* Quarterly - December year end*/ -#define FR_QTRSJAN FR_QTRJAN+12 /* Quarterly - January year end*/ -#define FR_QTRSFEB FR_QTRFEB+12 /* Quarterly - February year end*/ -#define FR_QTRSMAR FR_QTRMAR+12 /* Quarterly - March year end*/ -#define FR_QTRSAPR FR_QTRAPR+12 /* Quarterly - April year end*/ -#define FR_QTRSMAY FR_QTRMAY+12 /* Quarterly - May year end*/ -#define FR_QTRSJUN FR_QTRJUN+12 /* Quarterly - June year end*/ -#define FR_QTRSJUL FR_QTRJUL+12 /* Quarterly - July year end*/ -#define FR_QTRSAUG FR_QTRAUG+12 /* Quarterly - August year end*/ -#define FR_QTRSSEP FR_QTRSEP+12 /* Quarterly - September year end*/ -#define FR_QTRSOCT FR_QTROCT+12 /* Quarterly - October year end*/ -#define FR_QTRSNOV FR_QTRNOV+12 /* Quarterly - November year end*/ - -#define FR_MTH 3000 /* Monthly */ - -#define FR_WK 4000 /* Weekly */ -#define FR_WKSUN FR_WK /* Weekly - Sunday end of week */ -#define FR_WKMON 4001 /* Weekly - Monday end of week */ -#define FR_WKTUE 4002 /* Weekly - Tuesday end of week */ -#define FR_WKWED 4003 /* Weekly - Wednesday end of week */ -#define FR_WKTHU 4004 /* Weekly - Thursday end of week */ -#define FR_WKFRI 4005 /* Weekly - Friday end of week */ -#define FR_WKSAT 4006 /* Weekly - Saturday end of week */ - -#define FR_BUS 5000 /* Business days */ -#define FR_DAY 6000 /* Daily */ -#define FR_HR 7000 /* Hourly */ -#define FR_MIN 8000 /* Minutely */ -#define FR_SEC 9000 /* Secondly */ -#define FR_UND -10000 /* Undefined */ - -#define INT_ERR_CODE -999 - -#define HIGHFREQ_ORIG 719163 - -#define CHECK_ASFREQ(result) if ((result) == INT_ERR_CODE) return NULL - -#define MEM_CHECK(item) if (item == NULL) { return PyErr_NoMemory(); } -#define ERR_CHECK(item) if (item == NULL) { return NULL; } - -static int get_freq_group(int freq) { - return (freq/1000)*1000; -} - -struct asfreq_info{ - int from_week_end; //day the week ends on in the "from" frequency - int to_week_end; //day the week ends on in the "to" frequency - - int from_a_year_end; //month the year ends on in the "from" frequency - int to_a_year_end; //month the year ends on in the "to" frequency - - int from_q_year_end; //month the year ends on in the "from" frequency - int to_q_year_end; //month the year ends on in the "to" frequency -}; - -static struct asfreq_info NULL_AF_INFO; - -/********************************************************* -** Python callbacks. 
These functions must be called by ** -** the module __init__ script ** -*********************************************************/ -static PyObject * -set_callback(PyObject *args, PyObject **callback) -{ - PyObject *result = NULL; - PyObject *temp; - - if (PyArg_ParseTuple(args, "O:set_callback", &temp)) { - - if (!PyCallable_Check(temp)) { - PyErr_SetString(PyExc_TypeError, "parameter must be callable"); - return NULL; - } - - Py_XINCREF(temp); // Add a reference to new callback - Py_XDECREF(*callback); // Dispose of previous callback - *callback = temp; // Remember new callback - // Boilerplate to return "None" - Py_INCREF(Py_None); - result = Py_None; - } - return result; -} - -static PyObject *DateFromString = NULL; -static char set_callback_DateFromString_doc[] = -"set DateFromString function python callback"; -static PyObject * -set_callback_DateFromString(PyObject *dummy, PyObject *args) { - return set_callback(args, &DateFromString); -} - -static PyObject *DateTimeFromString = NULL; -static char set_callback_DateTimeFromString_doc[] = -"set DateTimeFromString function python callback"; -static PyObject * -set_callback_DateTimeFromString(PyObject *dummy, PyObject *args) { - return set_callback(args, &DateTimeFromString); -} - -/*********************************************************/ - -static char * -str_uppercase(char *str) { - if (str) { - int i, len=strlen(str); - char *result; - if((result = malloc((len + 1)*sizeof(char))) == NULL) { - return (char *)PyErr_NoMemory(); - } - strcpy(result, str); - - for (i=0;iday_of_year-1) - dinfo->day_of_week + 3; - if (week >= 0) week = week / 7 + 1; - - /* Verify */ - if (week < 0) { - /* The day lies in last week of the previous year */ - if ((week > -2) || - (week == -2 && dInfoCalc_Leapyear(dinfo->year-1, dinfo->calendar))) - week = 53; - else - week = 52; - } else if (week == 53) { - /* Check if the week belongs to year or year+1 */ - if (31-dinfo->day + dinfo->day_of_week < 3) { - week = 1; - } - } - - return week; -} - - -/* Return the day of the week for the given absolute date. */ -static -int dInfoCalc_DayOfWeek(register long absdate) -{ - int day_of_week; - - if (absdate >= 1) { - day_of_week = (absdate - 1) % 7; - } else { - day_of_week = 6 - ((-absdate) % 7); - } - return day_of_week; -} - -/* Return the year offset, that is the absolute date of the day - 31.12.(year-1) in the given calendar. - - Note: - For the Julian calendar we shift the absdate (which is measured - using the Gregorian Epoch) value by two days because the Epoch - (0001-01-01) in the Julian calendar lies 2 days before the Epoch in - the Gregorian calendar. */ -static -int dInfoCalc_YearOffset(register long year, - int calendar) -{ - year--; - if (calendar == GREGORIAN_CALENDAR) { - if (year >= 0 || -1/4 == -1) - return year*365 + year/4 - year/100 + year/400; - else - return year*365 + (year-3)/4 - (year-99)/100 + (year-399)/400; - } - else if (calendar == JULIAN_CALENDAR) { - if (year >= 0 || -1/4 == -1) - return year*365 + year/4 - 2; - else - return year*365 + (year-3)/4 - 2; - } - Py_Error(DateCalc_Error, "unknown calendar"); - onError: - return -1; -} - - -/* Set the instance's value using the given date and time. calendar - may be set to the flags: GREGORIAN_CALENDAR, - JULIAN_CALENDAR to indicate the calendar to be used. 
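dInfoCalc_DayOfWeek above turns an absolute date (proleptic Gregorian ordinal, with 0001-01-01 as day 1) into a weekday index, 0 for Monday through 6 for Sunday, which is consistent with the weekend test (day_of_week > 4) used by the business-day helpers above. A hedged restatement in Python:

    def day_of_week(absdate):
        # 0 == Monday ... 6 == Sunday; absdate 1 is 0001-01-01, a Monday
        if absdate >= 1:
            return (absdate - 1) % 7
        return 6 - ((-absdate) % 7)

    day_of_week(1)        # -> 0 (Monday)
    day_of_week(719163)   # -> 3 (Thursday, 1970-01-01)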
*/ - -static -int dInfoCalc_SetFromDateAndTime(struct date_info *dinfo, - int year, - int month, - int day, - int hour, - int minute, - double second, - int calendar) -{ - - /* Calculate the absolute date */ - { - int leap; - long yearoffset,absdate; - - /* Range check */ - Py_AssertWithArg(year > -(INT_MAX / 366) && year < (INT_MAX / 366), - DateCalc_RangeError, - "year out of range: %i", - year); - - /* Is it a leap year ? */ - leap = dInfoCalc_Leapyear(year,calendar); - - /* Negative month values indicate months relative to the years end */ - if (month < 0) month += 13; - Py_AssertWithArg(month >= 1 && month <= 12, - DateCalc_RangeError, - "month out of range (1-12): %i", - month); - - /* Negative values indicate days relative to the months end */ - if (day < 0) day += days_in_month[leap][month - 1] + 1; - Py_AssertWithArg(day >= 1 && day <= days_in_month[leap][month - 1], - DateCalc_RangeError, - "day out of range: %i", - day); - - yearoffset = dInfoCalc_YearOffset(year,calendar); - if (PyErr_Occurred()) goto onError; - - absdate = day + month_offset[leap][month - 1] + yearoffset; - - dinfo->absdate = absdate; - - dinfo->year = year; - dinfo->month = month; - dinfo->quarter = ((month-1)/3)+1; - dinfo->day = day; - - dinfo->day_of_week = dInfoCalc_DayOfWeek(absdate); - dinfo->day_of_year = (short)(absdate - yearoffset); - - dinfo->calendar = calendar; - } - - /* Calculate the absolute time */ - { - Py_AssertWithArg(hour >= 0 && hour <= 23, - DateCalc_RangeError, - "hour out of range (0-23): %i", - hour); - Py_AssertWithArg(minute >= 0 && minute <= 59, - DateCalc_RangeError, - "minute out of range (0-59): %i", - minute); - Py_AssertWithArg(second >= (double)0.0 && - (second < (double)60.0 || - (hour == 23 && minute == 59 && - second < (double)61.0)), - DateCalc_RangeError, - "second out of range (0.0 - <60.0; <61.0 for 23:59): %f", - second); - - dinfo->abstime = (double)(hour*3600 + minute*60) + second; - - dinfo->hour = hour; - dinfo->minute = minute; - dinfo->second = second; - } - return 0; - onError: - return -1; -} - -static int monthToQuarter(int month) { return ((month-1)/3)+1; } - -/* Sets the date part of the date_info struct using the indicated - calendar. - - XXX This could also be done using some integer arithmetics rather - than with this iterative approach... 
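dInfoCalc_SetFromDateAndTime above assembles the absolute date as day + month_offset[leap][month-1] + yearoffset, where dInfoCalc_YearOffset supplies the Gregorian day count up to 31 December of the previous year, and stores the time of day as hour*3600 + minute*60 + second. A hedged stand-alone restatement for the common case (plain Python, years >= 1, Gregorian only; the cumulative month-offset table is written out here because the original table is not visible in this excerpt):

    MONTH_OFFSET = [
        [0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365],   # normal years
        [0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366],   # leap years
    ]

    def is_gregorian_leapyear(year):
        return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)

    def year_offset(year):
        # dInfoCalc_YearOffset, Gregorian branch, for year >= 1
        y = year - 1
        return y * 365 + y // 4 - y // 100 + y // 400

    def absdate_from_ymd(year, month, day):
        # same arithmetic as above, with the range checks omitted
        leap = int(is_gregorian_leapyear(year))
        return day + MONTH_OFFSET[leap][month - 1] + year_offset(year)

    absdate_from_ymd(1, 1, 1)      # -> 1
    absdate_from_ymd(1970, 1, 1)   # -> 719163, i.e. HIGHFREQ_ORIG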
*/ -static -int dInfoCalc_SetFromAbsDate(register struct date_info *dinfo, - long absdate, - int calendar) -{ - register long year; - long yearoffset; - int leap,dayoffset; - int *monthoffset; - - /* Approximate year */ - if (calendar == GREGORIAN_CALENDAR) { - year = (long)(((double)absdate) / 365.2425); - } else if (calendar == JULIAN_CALENDAR) { - year = (long)(((double)absdate) / 365.25); - } else { - Py_Error(DateCalc_Error, "unknown calendar"); - } - if (absdate > 0) year++; - - /* Apply corrections to reach the correct year */ - while (1) { - /* Calculate the year offset */ - yearoffset = dInfoCalc_YearOffset(year,calendar); - if (PyErr_Occurred()) - goto onError; - - /* Backward correction: absdate must be greater than the - yearoffset */ - if (yearoffset >= absdate) { - year--; - continue; - } - - dayoffset = absdate - yearoffset; - leap = dInfoCalc_Leapyear(year,calendar); - - /* Forward correction: non leap years only have 365 days */ - if (dayoffset > 365 && !leap) { - year++; - continue; - } - break; - } - - dinfo->year = year; - dinfo->calendar = calendar; - - /* Now iterate to find the month */ - monthoffset = month_offset[leap]; - { - register int month; - - for (month = 1; month < 13; month++) { - if (monthoffset[month] >= dayoffset) - break; - } - - dinfo->month = month; - dinfo->quarter = monthToQuarter(month); - dinfo->day = dayoffset - month_offset[leap][month-1]; - } - - - dinfo->day_of_week = dInfoCalc_DayOfWeek(absdate); - dinfo->day_of_year = dayoffset; - dinfo->absdate = absdate; - - return 0; - - onError: - return -1; -} - -/* Sets the time part of the DateTime object. */ -static -int dInfoCalc_SetFromAbsTime(struct date_info *dinfo, - double abstime) -{ - int inttime; - int hour,minute; - double second; - - inttime = (int)abstime; - hour = inttime / 3600; - minute = (inttime % 3600) / 60; - second = abstime - (double)(hour*3600 + minute*60); - - dinfo->hour = hour; - dinfo->minute = minute; - dinfo->second = second; - - dinfo->abstime = abstime; - - return 0; -} - -/* Set the instance's value using the given date and time. calendar - may be set to the flags: GREGORIAN_CALENDAR, JULIAN_CALENDAR to - indicate the calendar to be used. 
*/ -static -int dInfoCalc_SetFromAbsDateTime(struct date_info *dinfo, - long absdate, - double abstime, - int calendar) -{ - - /* Bounds check */ - Py_AssertWithArg(abstime >= 0.0 && abstime <= SECONDS_PER_DAY, - DateCalc_Error, - "abstime out of range (0.0 - 86400.0): %f", - abstime); - - /* Calculate the date */ - if (dInfoCalc_SetFromAbsDate(dinfo, - absdate, - calendar)) - goto onError; - - /* Calculate the time */ - if (dInfoCalc_SetFromAbsTime(dinfo, - abstime)) - goto onError; - - return 0; - onError: - return -1; -} - -/* -==================================================== -== End of section borrowed from mx.DateTime == -==================================================== -*/ - - - - - -/////////////////////////////////////////////////////////////////////// - -// helpers for frequency conversion routines // - -static long DtoB_weekday(long fromDate) { return (((fromDate) / 7) * 5) + (fromDate)%7; } - -static long DtoB_WeekendToMonday(long absdate, int day_of_week) { - - if (day_of_week > 4) { - //change to Monday after weekend - absdate += (7 - day_of_week); - } - return DtoB_weekday(absdate); -} - -static long DtoB_WeekendToFriday(long absdate, int day_of_week) { - - if (day_of_week > 4) { - //change to friday before weekend - absdate -= (day_of_week - 4); - } - return DtoB_weekday(absdate); -} - -static long absdate_from_ymd(int y, int m, int d) { - struct date_info tempDate; - if (dInfoCalc_SetFromDateAndTime(&tempDate, y, m, d, 0, 0, 0, GREGORIAN_CALENDAR)) return INT_ERR_CODE; - return tempDate.absdate; -} - - -/////////////////////////////////////////////// - -// frequency specifc conversion routines -// each function must take an integer fromDate and a char relation ('B' or 'A' for 'BEFORE' or 'AFTER') - -//************ FROM DAILY *************** - -static long asfreq_DtoA(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - if (dinfo.month > af_info->to_a_year_end) { return (long)(dinfo.year + 1); } - else { return (long)(dinfo.year); } -} - -static long DtoQ_yq(long fromDate, struct asfreq_info *af_info, - int *year, int *quarter) { - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - if (af_info->to_q_year_end != 12) { - dinfo.month -= af_info->to_q_year_end; - if (dinfo.month <= 0) { dinfo.month += 12; } - else { dinfo.year += 1; } - dinfo.quarter = monthToQuarter(dinfo.month); - } - - *year = dinfo.year; - *quarter = dinfo.quarter; - - return 0; -} - - -static long asfreq_DtoQ(long fromDate, char relation, struct asfreq_info *af_info) { - - int year, quarter; - - if (DtoQ_yq(fromDate, af_info, &year, &quarter) == INT_ERR_CODE) - { return INT_ERR_CODE; } - - return (long)((year - 1) * 4 + quarter); -} - -static long asfreq_DtoM(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - return (long)((dinfo.year - 1) * 12 + dinfo.month); -} - -static long asfreq_DtoW(long fromDate, char relation, struct asfreq_info *af_info) { - return (fromDate - (1 + af_info->to_week_end))/7 + 1; -} - -static long asfreq_DtoB(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (relation == 'B') { - return 
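/* Sketch of the daily-to-business mapping used by these helpers:
   DtoB_weekday compresses each 7-day calendar week into 5 business
   days via (absdate/7)*5 + absdate%7, and a weekend date is first
   snapped to an adjacent business day, back to Friday for relation
   'B' and forward to Monday for relation 'A'.  Taking day_of_week 0
   as Monday (so the > 4 test marks Saturday and Sunday), absdate 6
   is a Saturday: 'B' gives DtoB_weekday(5) == 5, the Friday before,
   while 'A' gives DtoB_weekday(8) == 6, the Monday after. */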
DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); - } else { - return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); - } -} - -static long asfreq_DtoB_forConvert(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, fromDate, - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (dinfo.day_of_week > 4) { - return -1; - } else { - return DtoB_weekday(fromDate); - } -} - -// needed for getDateInfo function -static long asfreq_DtoD(long fromDate, char relation, struct asfreq_info *af_info) { return fromDate; } - -static long asfreq_DtoHIGHFREQ(long fromDate, char relation, long periodsPerDay) { - if (fromDate >= HIGHFREQ_ORIG) { - if (relation == 'B') { return (fromDate - HIGHFREQ_ORIG)*(periodsPerDay) + 1; } - else { return (fromDate - HIGHFREQ_ORIG + 1)*(periodsPerDay); } - } else { return -1; } -} - -static long asfreq_DtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoHIGHFREQ(fromDate, relation, 24); } -static long asfreq_DtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoHIGHFREQ(fromDate, relation, 24*60); } -static long asfreq_DtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoHIGHFREQ(fromDate, relation, 24*60*60); } - -//************ FROM SECONDLY *************** - -static long asfreq_StoD(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/(60*60*24) + HIGHFREQ_ORIG; } - -static long asfreq_StoA(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoA(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_StoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_StoM(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoM(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_StoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_StoB(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_StoB_forConvert(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB_forConvert(asfreq_StoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_StoT(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/60 + 1; } -static long asfreq_StoH(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/(60*60) + 1; } - -//************ FROM MINUTELY *************** - -static long asfreq_TtoD(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/(60*24) + HIGHFREQ_ORIG; } - -static long asfreq_TtoA(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoA(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_TtoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_TtoM(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoM(asfreq_TtoD(fromDate, relation, 
&NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_TtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_TtoB(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_TtoB_forConvert(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB_forConvert(asfreq_TtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_TtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/60 + 1; } -static long asfreq_TtoS(long fromDate, char relation, struct asfreq_info *af_info) { - if (relation == 'B') { return fromDate*60 - 59; } - else { return fromDate*60; }} - -//************ FROM HOURLY *************** - -static long asfreq_HtoD(long fromDate, char relation, struct asfreq_info *af_info) - { return (fromDate - 1)/24 + HIGHFREQ_ORIG; } -static long asfreq_HtoA(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoA(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_HtoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_HtoM(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoM(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_HtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } -static long asfreq_HtoB(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_HtoB_forConvert(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoB_forConvert(asfreq_HtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -// calculation works out the same as TtoS, so we just call that function for HtoT -static long asfreq_HtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_TtoS(fromDate, relation, &NULL_AF_INFO); } -static long asfreq_HtoS(long fromDate, char relation, struct asfreq_info *af_info) { - if (relation == 'B') { return fromDate*60*60 - 60*60 + 1; } - else { return fromDate*60*60; }} - -//************ FROM BUSINESS *************** - -static long asfreq_BtoD(long fromDate, char relation, struct asfreq_info *af_info) - { return ((fromDate-1)/5)*7 + (fromDate-1)%5 + 1; } - -static long asfreq_BtoA(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoA(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } - -static long asfreq_BtoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } - -static long asfreq_BtoM(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoM(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_BtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } - -static long asfreq_BtoH(long fromDate, 
char relation, struct asfreq_info *af_info) - { return asfreq_DtoH(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_BtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoT(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -static long asfreq_BtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoS(asfreq_BtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -//************ FROM WEEKLY *************** - -static long asfreq_WtoD(long fromDate, char relation, struct asfreq_info *af_info) { - if (relation == 'B') { return fromDate * 7 - 6 + af_info->from_week_end;} - else { return fromDate * 7 + af_info->from_week_end; } -} - -static long asfreq_WtoA(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoA(asfreq_WtoD(fromDate, 'A', af_info), relation, af_info); } -static long asfreq_WtoQ(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoQ(asfreq_WtoD(fromDate, 'A', af_info), relation, af_info); } -static long asfreq_WtoM(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoM(asfreq_WtoD(fromDate, 'A', af_info), relation, &NULL_AF_INFO); } - -static long asfreq_WtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_WtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_WtoB(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_WtoD(fromDate, relation, af_info), - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } - else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } -} - -static long asfreq_WtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoH(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_WtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoT(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_WtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoS(asfreq_WtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } - -//************ FROM MONTHLY *************** - -static void MtoD_ym(long fromDate, long *y, long *m) { - *y = (fromDate - 1) / 12 + 1; - *m = fromDate - 12 * (*y) - 1; -} - -static long asfreq_MtoD(long fromDate, char relation, struct asfreq_info *af_info) { - - long y, m, absdate; - - if (relation == 'B') { - MtoD_ym(fromDate, &y, &m); - if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; - return absdate; - } else { - MtoD_ym(fromDate+1, &y, &m); - if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; - return absdate-1; - } -} - -static long asfreq_MtoA(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoA(asfreq_MtoD(fromDate, 'A', &NULL_AF_INFO), relation, af_info); } - -static long asfreq_MtoQ(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoQ(asfreq_MtoD(fromDate, 'A', &NULL_AF_INFO), relation, af_info); } - -static long asfreq_MtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, af_info); } - -static long 
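/* The monthly ordinal used throughout is (year - 1)*12 + month (see
   asfreq_DtoM above); for example, Apr-2007 encodes as
   2006*12 + 4 == 24076.  asfreq_MtoD resolves such an ordinal to the
   first ('B') or last ('A') calendar day of the month, and the
   business-frequency conversion below then applies the same
   weekend-snapping helpers as the daily case. */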
asfreq_MtoB(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } - else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } -} - -static long asfreq_MtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoH(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_MtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoT(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } -static long asfreq_MtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoS(asfreq_MtoD(fromDate, relation, &NULL_AF_INFO), relation, &NULL_AF_INFO); } - -//************ FROM QUARTERLY *************** - -static void QtoD_ym(long fromDate, long *y, long *m, struct asfreq_info *af_info) { - - *y = (fromDate - 1) / 4 + 1; - *m = (fromDate + 4) * 3 - 12 * (*y) - 2; - - if (af_info->from_q_year_end != 12) { - *m += af_info->from_q_year_end; - if (*m > 12) { *m -= 12; } - else { *y -= 1; } - } -} - -static long asfreq_QtoD(long fromDate, char relation, struct asfreq_info *af_info) { - - long y, m, absdate; - - if (relation == 'B') { - QtoD_ym(fromDate, &y, &m, af_info); - if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; - return absdate; - } else { - QtoD_ym(fromDate+1, &y, &m, af_info); - if ((absdate = absdate_from_ymd(y, m, 1)) == INT_ERR_CODE) return INT_ERR_CODE; - return absdate - 1; - } -} - -static long asfreq_QtoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_QtoA(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoA(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_QtoM(long fromDate, char relation, struct asfreq_info *af_info) { - return asfreq_DtoM(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } - -static long asfreq_QtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_QtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_QtoB(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_QtoD(fromDate, relation, af_info), - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } - else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } -} - - -static long asfreq_QtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoH(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_QtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoT(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_QtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoS(asfreq_QtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } - - -//************ FROM ANNUAL *************** - -static long asfreq_AtoD(long fromDate, char relation, struct asfreq_info *af_info) { - long absdate, year, final_adj; - int 
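/* An annual period is anchored on its year-end month: the period
   starts on the first day of the following month, hence
   month = (from_a_year_end + 1) % 12 (a December year-end wraps to
   January).  Relation 'B' returns the first day of the period and
   'A' the last; for instance a June year-end maps year N to
   1 July of calendar year N-1 through 30 June of calendar year N. */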
month = (af_info->from_a_year_end + 1) % 12; - - if (relation == 'B') { - if (af_info->from_a_year_end == 12) {year = fromDate;} - else {year = fromDate - 1;} - final_adj = 0; - } else { - if (af_info->from_a_year_end == 12) {year = fromDate+1;} - else {year = fromDate;} - final_adj = -1; - } - absdate = absdate_from_ymd(year, month, 1); - if (absdate == INT_ERR_CODE) return INT_ERR_CODE; - return absdate + final_adj; -} - -static long asfreq_AtoA(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoA(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_AtoQ(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoQ(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_AtoM(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoM(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_AtoW(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoW(asfreq_AtoD(fromDate, relation, af_info), relation, af_info); } - -static long asfreq_AtoB(long fromDate, char relation, struct asfreq_info *af_info) { - - struct date_info dinfo; - if (dInfoCalc_SetFromAbsDate(&dinfo, asfreq_AtoD(fromDate, relation, af_info), - GREGORIAN_CALENDAR)) return INT_ERR_CODE; - - if (relation == 'B') { return DtoB_WeekendToMonday(dinfo.absdate, dinfo.day_of_week); } - else { return DtoB_WeekendToFriday(dinfo.absdate, dinfo.day_of_week); } -} - -static long asfreq_AtoH(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoH(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_AtoT(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoT(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } -static long asfreq_AtoS(long fromDate, char relation, struct asfreq_info *af_info) - { return asfreq_DtoS(asfreq_AtoD(fromDate, relation, af_info), relation, &NULL_AF_INFO); } - -static long nofunc(long fromDate, char relation, struct asfreq_info *af_info) { return -1; } - -// end of frequency specific conversion routines - -// return a pointer to appropriate conversion function -static long (*get_asfreq_func(int fromFreq, int toFreq, int forConvert))(long, char, struct asfreq_info*) { - - int fromGroup = get_freq_group(fromFreq); - int toGroup = get_freq_group(toFreq); - - if (fromGroup == FR_UND) { fromGroup = FR_DAY; } - - switch(fromGroup) - { - case FR_ANN: - switch(toGroup) - { - case FR_ANN: return &asfreq_AtoA; - case FR_QTR: return &asfreq_AtoQ; - case FR_MTH: return &asfreq_AtoM; - case FR_WK: return &asfreq_AtoW; - case FR_BUS: return &asfreq_AtoB; - case FR_DAY: return &asfreq_AtoD; - case FR_HR: return &asfreq_AtoH; - case FR_MIN: return &asfreq_AtoT; - case FR_SEC: return &asfreq_AtoS; - default: return &nofunc; - } - - case FR_QTR: - switch(toGroup) - { - case FR_ANN: return &asfreq_QtoA; - case FR_QTR: return &asfreq_QtoQ; - case FR_MTH: return &asfreq_QtoM; - case FR_WK: return &asfreq_QtoW; - case FR_BUS: return &asfreq_QtoB; - case FR_DAY: return &asfreq_QtoD; - case FR_HR: return &asfreq_QtoH; - case FR_MIN: return &asfreq_QtoT; - case FR_SEC: return &asfreq_QtoS; - default: return &nofunc; - } - - case FR_MTH: - switch(toGroup) - { - case FR_ANN: return &asfreq_MtoA; - case FR_QTR: return &asfreq_MtoQ; - case FR_WK: return &asfreq_MtoW; - case FR_BUS: return &asfreq_MtoB; - case FR_DAY: return &asfreq_MtoD; - case FR_HR: 
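/* get_asfreq_func is a plain dispatch on (fromGroup, toGroup);
   unsupported pairs fall through to nofunc, which returns -1.  The
   forConvert flag selects the forConvert business-day variants,
   which report weekends as -1 instead of snapping them.  A minimal
   usage sketch, mirroring DateObject_asfreq further below, where
   monthly_value stands for some monthly ordinal:

       struct asfreq_info af_info;
       long (*f)(long, char, struct asfreq_info*);
       long result;
       f = get_asfreq_func(FR_MTH, FR_BUS, 0);
       get_asfreq_info(FR_MTH, FR_BUS, &af_info);
       result = f(monthly_value, 'A', &af_info);
*/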
return &asfreq_MtoH; - case FR_MIN: return &asfreq_MtoT; - case FR_SEC: return &asfreq_MtoS; - default: return &nofunc; - } - - case FR_WK: - switch(toGroup) - { - case FR_ANN: return &asfreq_WtoA; - case FR_QTR: return &asfreq_WtoQ; - case FR_MTH: return &asfreq_WtoM; - case FR_WK: return &asfreq_WtoW; - case FR_BUS: return &asfreq_WtoB; - case FR_DAY: return &asfreq_WtoD; - case FR_HR: return &asfreq_WtoH; - case FR_MIN: return &asfreq_WtoT; - case FR_SEC: return &asfreq_WtoS; - default: return &nofunc; - } - - case FR_BUS: - switch(toGroup) - { - case FR_ANN: return &asfreq_BtoA; - case FR_QTR: return &asfreq_BtoQ; - case FR_MTH: return &asfreq_BtoM; - case FR_WK: return &asfreq_BtoW; - case FR_DAY: return &asfreq_BtoD; - case FR_HR: return &asfreq_BtoH; - case FR_MIN: return &asfreq_BtoT; - case FR_SEC: return &asfreq_BtoS; - default: return &nofunc; - } - - case FR_DAY: - switch(toGroup) - { - case FR_ANN: return &asfreq_DtoA; - case FR_QTR: return &asfreq_DtoQ; - case FR_MTH: return &asfreq_DtoM; - case FR_WK: return &asfreq_DtoW; - case FR_BUS: - if (forConvert) { return &asfreq_DtoB_forConvert; } - else { return &asfreq_DtoB; } - case FR_DAY: return &asfreq_DtoD; - case FR_HR: return &asfreq_DtoH; - case FR_MIN: return &asfreq_DtoT; - case FR_SEC: return &asfreq_DtoS; - default: return &nofunc; - } - - case FR_HR: - switch(toGroup) - { - case FR_ANN: return &asfreq_HtoA; - case FR_QTR: return &asfreq_HtoQ; - case FR_MTH: return &asfreq_HtoM; - case FR_WK: return &asfreq_HtoW; - case FR_BUS: - if (forConvert) { return &asfreq_HtoB_forConvert; } - else { return &asfreq_HtoB; } - case FR_DAY: return &asfreq_HtoD; - case FR_MIN: return &asfreq_HtoT; - case FR_SEC: return &asfreq_HtoS; - default: return &nofunc; - } - - case FR_MIN: - switch(toGroup) - { - case FR_ANN: return &asfreq_TtoA; - case FR_QTR: return &asfreq_TtoQ; - case FR_MTH: return &asfreq_TtoM; - case FR_WK: return &asfreq_TtoW; - case FR_BUS: - if (forConvert) { return &asfreq_TtoB_forConvert; } - else { return &asfreq_TtoB; } - case FR_DAY: return &asfreq_TtoD; - case FR_HR: return &asfreq_TtoH; - case FR_SEC: return &asfreq_TtoS; - default: return &nofunc; - } - - case FR_SEC: - switch(toGroup) - { - case FR_ANN: return &asfreq_StoA; - case FR_QTR: return &asfreq_StoQ; - case FR_MTH: return &asfreq_StoM; - case FR_WK: return &asfreq_StoW; - case FR_BUS: - if (forConvert) { return &asfreq_StoB_forConvert; } - else { return &asfreq_StoB; } - case FR_DAY: return &asfreq_StoD; - case FR_HR: return &asfreq_StoH; - case FR_MIN: return &asfreq_StoT; - default: return &nofunc; - } - default: return &nofunc; - } -} - -/* -Helper function for cseries_convert: - determine the size of the second dimension for the resulting - converted array -*/ -static long get_height(int fromFreq, int toFreq) { - - int maxBusDaysPerYear, maxBusDaysPerQuarter, maxBusDaysPerMonth; - int maxDaysPerYear, maxDaysPerQuarter, maxDaysPerMonth; - - int fromGroup = get_freq_group(fromFreq); - int toGroup = get_freq_group(toFreq); - - if (fromGroup == FR_UND) { fromGroup = FR_DAY; } - - maxBusDaysPerYear = 262; - maxBusDaysPerQuarter = 66; - maxBusDaysPerMonth = 23; - - maxDaysPerYear = 366; - maxDaysPerQuarter = 92; - maxDaysPerMonth = 31; - - - - switch(fromGroup) - { - case FR_ANN: return 1; - case FR_QTR: - switch(toGroup) - { - case FR_ANN: return 4; - default: return 1; - } - case FR_MTH: //monthly - switch(toGroup) - { - case FR_ANN: return 12; - case FR_QTR: return 3; - default: return 1; - } - case FR_WK: //weekly - switch(toGroup) - { - case 
FR_ANN: return 53; - case FR_QTR: return 13; - case FR_MTH: return 4; - default: return 1; - } - case FR_BUS: //business - switch(toGroup) - { - case FR_ANN: return maxBusDaysPerYear;; - case FR_QTR: return maxBusDaysPerQuarter; - case FR_MTH: return maxBusDaysPerMonth; - case FR_WK: return 5; - default: return 1; - } - case FR_DAY: //daily - switch(toGroup) - { - case FR_ANN: return maxDaysPerYear;; - case FR_QTR: return maxDaysPerQuarter; - case FR_MTH: return maxDaysPerMonth; - case FR_WK: return 7; - default: return 1; - } - case FR_HR: //hourly - switch(toGroup) - { - case FR_ANN: return 24 * maxDaysPerYear;; - case FR_QTR: return 24 * maxDaysPerQuarter; - case FR_MTH: return 24 * maxDaysPerMonth; - case FR_WK: return 24 * 7; - case FR_DAY: return 24; - case FR_BUS: return 24; - default: return 1; - } - case FR_MIN: //minutely - switch(toGroup) - { - case FR_ANN: return 24 * 60 * maxDaysPerYear;; - case FR_QTR: return 24 * 60 * maxDaysPerQuarter; - case FR_MTH: return 24 * 60 * maxDaysPerMonth; - case FR_WK: return 24 * 60 * 7; - case FR_DAY: return 24 * 60; - case FR_BUS: return 24 * 60; - case FR_HR: return 60; - default: return 1; - } - case FR_SEC: //minutely - switch(toGroup) - { - case FR_ANN: return 24 * 60 * 60 * maxDaysPerYear;; - case FR_QTR: return 24 * 60 * 60 * maxDaysPerQuarter; - case FR_MTH: return 24 * 60 * 60 * maxDaysPerMonth; - case FR_WK: return 24 * 60 * 60 * 7; - case FR_DAY: return 24 * 60 * 60; - case FR_BUS: return 24 * 60 * 60; - case FR_HR: return 60 * 60; - case FR_MIN: return 60; - default: return 1; - } - default: return 1; - } -} - -static int calc_a_year_end(int freq, int group) { - int result = (freq - group) % 12; - if (result == 0) {return 12;} - else {return result;} -} - -static int calc_week_end(int freq, int group) { - return freq - group; -} - -static void get_asfreq_info(int fromFreq, int toFreq, struct asfreq_info *af_info) { - - int fromGroup = get_freq_group(fromFreq); - int toGroup = get_freq_group(toFreq); - - switch(fromGroup) - { - case FR_WK: { - af_info->from_week_end = calc_week_end(fromFreq, fromGroup); - } break; - case FR_ANN: { - af_info->from_a_year_end = calc_a_year_end(fromFreq, fromGroup); - } break; - case FR_QTR: { - af_info->from_q_year_end = calc_a_year_end(fromFreq, fromGroup); - } break; - - } - - switch(toGroup) - { - case FR_WK: { - af_info->to_week_end = calc_week_end(toFreq, toGroup); - } break; - case FR_ANN: { - af_info->to_a_year_end = calc_a_year_end(toFreq, toGroup); - } break; - case FR_QTR: { - af_info->to_q_year_end = calc_a_year_end(toFreq, toGroup); - } break; - } - -} - -static double getAbsTime(int freq, long dailyDate, long originalDate) { - - long startOfDay, periodsPerDay; - - switch(freq) - { - case FR_HR: - periodsPerDay = 24; - break; - case FR_MIN: - periodsPerDay = 24*60; - break; - case FR_SEC: - periodsPerDay = 24*60*60; - break; - default: - return 0; - } - - startOfDay = asfreq_DtoHIGHFREQ(dailyDate, 'B', periodsPerDay); - return (24*60*60)*((double)(originalDate - startOfDay))/((double)periodsPerDay); -} - -/************************************************************ -** Date type definition -************************************************************/ - -typedef struct { - PyObject_HEAD - int freq; /* frequency of date */ - int value; /* integer representation of date */ - PyObject* cached_vals; -} DateObject; - -/* Forward declarations */ -static PyTypeObject DateType; -#define DateObject_Check(op) PyObject_TypeCheck(op, &DateType) - -static void -DateObject_dealloc(DateObject* self) { - 
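/* A DateObject is just a frequency code plus an integer ordinal, with
   an optional cached Python object hanging off cached_vals; the
   destructor only needs to drop that reference before freeing the
   object itself. */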
Py_XDECREF(self->cached_vals); - self->ob_type->tp_free((PyObject*)self); -} - - -static PyObject *freq_dict, *freq_dict_rev, *freq_constants; - -#define DICT_SETINT_STRKEY(dict, key, val) \ - {PyObject *pyval = PyInt_FromLong(val); \ - PyDict_SetItemString(dict, key, pyval); \ - Py_DECREF(pyval); } - -#define ADD_FREQ_CONSTANT(const_name, val) \ - DICT_SETINT_STRKEY(freq_constants, const_name, val) - -#define INIT_FREQ(const_name, key, aliases) \ - {PyObject *pykey = PyInt_FromLong(key); \ - PyDict_SetItem(freq_dict, pykey, aliases); \ - PyDict_SetItemString(freq_constants, const_name, pykey); \ - Py_DECREF(pykey); \ - Py_DECREF(aliases); } - - -static int init_freq_group(int num_items, int num_roots, int base_const, - char item_abbrevs[][2][10], char group_prefixes[][15], - char item_const_names[][15]) { - - int i; - - for (i = 0; i < num_items; i++) { - - PyObject *aliases; - int j, size, k; - - if (i == 0) { k = 3; } else { k = 2; } - - size = num_roots * k; - - aliases = PyTuple_New(size); - - for (j = 0; j < num_roots; j++) { - PyObject *alias_v1, *alias_v2; - char *root, *alt; - - if ((root = malloc((30) * sizeof(char))) == NULL) return INT_ERR_CODE; - if ((alt = malloc((30) * sizeof(char))) == NULL) return INT_ERR_CODE; - - strcpy(root, group_prefixes[j]); - strcpy(alt, group_prefixes[j]); - - if (i == 0) { - PyObject *alias = PyString_FromString(root); - PyTuple_SET_ITEM(aliases, j*k + 2, alias); - } - - strcat(root, "-"); - strcat(root, item_abbrevs[i][0]); - strcat(alt, "-"); - strcat(alt, item_abbrevs[i][1]); - - alias_v1 = PyString_FromString(root); - alias_v2 = PyString_FromString(alt); - - free(root); - free(alt); - - PyTuple_SET_ITEM(aliases, j*k, alias_v1); - PyTuple_SET_ITEM(aliases, j*k + 1, alias_v2); - } - - INIT_FREQ(item_const_names[i], base_const+i, aliases); - } - - return 0; -} - -/* take a dictionary with integer keys and tuples of strings for values, - and populate a dictionary with all the strings as keys and integers - for values */ -static int reverse_dict(PyObject *source, PyObject *dest) { - - PyObject *key, *value; - - Py_ssize_t pos = 0; - - while (PyDict_Next(source, &pos, &key, &value)) { - PyObject *tuple_iter; - PyObject *item; - - if((tuple_iter = PyObject_GetIter(value)) == NULL) return INT_ERR_CODE; - - while ((item = PyIter_Next(tuple_iter)) != NULL) { - PyDict_SetItem(dest, item, key); - Py_DECREF(item); - } - Py_DECREF(tuple_iter); - } - return 0; -} - -static int build_freq_dict(void) { - - char ANN_prefixes[8][15] = { "A", "Y", "ANN", "ANNUAL", "ANNUALLY", - "YR", "YEAR", "YEARLY" }; - - char QTRE_prefixes[8][15] = { "Q", "QTR", "QUARTER", "QUARTERLY", "Q-E", - "QTR-E", "QUARTER-E", "QUARTERLY-E"}; - char QTRS_prefixes[4][15] = { "Q-S", "QTR-S", "QUARTER-S", "QUARTERLY-S" }; - - char WK_prefixes[4][15] = { "W", "WK", "WEEK", "WEEKLY" }; - - /* Note: order of this array must match up with how the Annual - frequency constants are lined up */ - char month_names[12][2][10] = { - { "DEC", "DECEMBER" }, - { "JAN", "JANUARY" }, - { "FEB", "FEBRUARY" }, - { "MAR", "MARCH" }, - { "APR", "APRIL" }, - { "MAY", "MAY" }, - { "JUN", "JUNE" }, - { "JUL", "JULY" }, - { "AUG", "AUGUST" }, - { "SEP", "SEPTEMBER" }, - { "OCT", "OCTOBER" }, - { "NOV", "NOVEMBER" }}; - - char day_names[7][2][10] = { - { "SUN", "SUNDAY" }, - { "MON", "MONDAY" }, - { "TUE", "TUESDAY" }, - { "WED", "WEDNESDAY" }, - { "THU", "THURSDAY" }, - { "FRI", "FRIDAY" }, - { "SAT", "SATURDAY" }}; - - char ANN_const_names[12][15] = { - "FR_ANNDEC", - "FR_ANNJAN", - "FR_ANNFEB", - "FR_ANNMAR", - 
"FR_ANNAPR", - "FR_ANNMAY", - "FR_ANNJUN", - "FR_ANNJUL", - "FR_ANNAUG", - "FR_ANNSEP", - "FR_ANNOCT", - "FR_ANNNOV"}; - - char QTRE_const_names[12][15] = { - "FR_QTREDEC", - "FR_QTREJAN", - "FR_QTREFEB", - "FR_QTREMAR", - "FR_QTREAPR", - "FR_QTREMAY", - "FR_QTREJUN", - "FR_QTREJUL", - "FR_QTREAUG", - "FR_QTRESEP", - "FR_QTREOCT", - "FR_QTRENOV"}; - - char QTRS_const_names[12][15] = { - "FR_QTRSDEC", - "FR_QTRSJAN", - "FR_QTRSFEB", - "FR_QTRSMAR", - "FR_QTRSAPR", - "FR_QTRSMAY", - "FR_QTRSJUN", - "FR_QTRSJUL", - "FR_QTRSAUG", - "FR_QTRSSEP", - "FR_QTRSOCT", - "FR_QTRSNOV"}; - - char WK_const_names[7][15] = { - "FR_WKSUN", - "FR_WKMON", - "FR_WKTUE", - "FR_WKWED", - "FR_WKTHU", - "FR_WKFRI", - "FR_WKSAT"}; - - PyObject *aliases; - - freq_dict = PyDict_New(); - freq_dict_rev = PyDict_New(); - freq_constants = PyDict_New(); - - aliases = Py_BuildValue("(ssss)", "M", "MTH", "MONTH", "MONTHLY"); - INIT_FREQ("FR_MTH", FR_MTH, aliases); - - aliases = Py_BuildValue("(ssss)", "B", "BUS", "BUSINESS", "BUSINESSLY"); - INIT_FREQ("FR_BUS", FR_BUS, aliases); - - aliases = Py_BuildValue("(ssss)", "D", "DAY", "DLY", "DAILY"); - INIT_FREQ("FR_DAY", FR_DAY, aliases); - - aliases = Py_BuildValue("(sssss)", "H", "HR", "HOUR", "HRLY", "HOURLY"); - INIT_FREQ("FR_HR", FR_HR, aliases); - - aliases = Py_BuildValue("(ssss)", "T", "MIN", "MINUTE", "MINUTELY"); - INIT_FREQ("FR_MIN", FR_MIN, aliases); - - aliases = Py_BuildValue("(ssss)", "S", "SEC", "SECOND", "SECONDLY"); - INIT_FREQ("FR_SEC", FR_SEC, aliases); - - aliases = Py_BuildValue("(ssss)", "U", "UND", "UNDEF", "UNDEFINED"); - INIT_FREQ("FR_UND", FR_UND, aliases); - - ADD_FREQ_CONSTANT("FR_ANN", FR_ANN); - - if(init_freq_group(12, 8, FR_ANN, - month_names, ANN_prefixes, ANN_const_names) == INT_ERR_CODE) { - return INT_ERR_CODE; - } - - ADD_FREQ_CONSTANT("FR_QTR", FR_QTR); - - if(init_freq_group(12, 8, FR_QTREDEC, - month_names, QTRE_prefixes, QTRE_const_names) == INT_ERR_CODE) { - return INT_ERR_CODE; - } - - if(init_freq_group(12, 4, FR_QTRSDEC, - month_names, QTRS_prefixes, QTRS_const_names) == INT_ERR_CODE) { - return INT_ERR_CODE; - } - - ADD_FREQ_CONSTANT("FR_WK", FR_WK); - - if(init_freq_group(7, 4, FR_WK, - day_names, WK_prefixes, WK_const_names) == INT_ERR_CODE) { - return INT_ERR_CODE; - } - - if(reverse_dict(freq_dict, freq_dict_rev) == INT_ERR_CODE) { - return INT_ERR_CODE; - } - - return 0; -} - - -/* take user specified frequency and convert to int representation - of the frequency */ -static int check_freq(PyObject *freq_spec) { - - if (PyInt_Check(freq_spec)) { - return (int)PyInt_AsLong(freq_spec); - } else if (PyString_Check(freq_spec)) { - char *freq_str, *freq_str_uc; - PyObject *freq_val; - - freq_str = PyString_AsString(freq_spec); - if((freq_str_uc = str_uppercase(freq_str)) == NULL) {return INT_ERR_CODE;} - - freq_val = PyDict_GetItemString(freq_dict_rev, freq_str_uc); - - free(freq_str_uc); - - if (freq_val == NULL) { - PyErr_SetString(PyExc_ValueError, "invalid frequency specification"); - return INT_ERR_CODE; - } else { - int ret_val = (int)PyInt_AsLong(freq_val); - return ret_val; - } - } else if (freq_spec == Py_None) { - return FR_UND; - } else { - int retval = (int)PyInt_AsLong(freq_spec); - if (PyErr_Occurred()) { - PyErr_SetString(PyExc_ValueError, "invalid frequency specification"); - return INT_ERR_CODE; - } else { return retval; } - } - -} - -static PyObject * -DateObject_new(PyTypeObject *type, PyObject *args, PyObject *kwds) { - - DateObject *self; - - self = (DateObject*)type->tp_alloc(type, 0); - if (self != NULL) { - // 
initialize attributes that need initializing in here - self->freq = FR_UND; - self->value = -1; - } - - return (PyObject *)self; -} - -/* for use in C code */ -static DateObject * -DateObject_New(void) { - PyObject *dummy; - return (DateObject*)DateObject_new(&DateType, dummy, dummy); -} - -#define INIT_ERR(errortype, errmsg) PyErr_SetString(errortype,errmsg);return -1 - -static int -DateObject_init(DateObject *self, PyObject *args, PyObject *kwds) { - - PyObject *freq=NULL, *value=NULL, *datetime=NULL, *string=NULL; - char *INSUFFICIENT_MSG = "insufficient parameters to initialize Date"; - - int def_info=INT_ERR_CODE; - - int year=def_info, month=def_info, day=def_info, quarter=def_info, - hour=def_info, minute=def_info, second=def_info; - - int free_dt=0; - - static char *kwlist[] = {"freq", "value", "string", - "year", "month", "day", "quarter", - "hour", "minute", "second", - "datetime", NULL}; - - if (! PyArg_ParseTupleAndKeywords(args, kwds, "O|OOiiiiiiiO", kwlist, - &freq, &value, &string, - &year, &month, &day, &quarter, - &hour, &minute, &second, - &datetime)) return -1; - - if (PyObject_HasAttrString(freq, "freq")) { - PyObject *freq_attr = PyObject_GetAttrString(freq, "freq"); - self->freq = PyInt_AS_LONG(freq_attr); - Py_DECREF(freq_attr); - } else { - if((self->freq = check_freq(freq)) == INT_ERR_CODE) return -1; - } - - if ((value && PyString_Check(value)) || string) { - - PyObject *string_arg = PyTuple_New(1); - int freq_group = get_freq_group(self->freq); - - free_dt = 1; - - if (!string) { - string = value; - } - - PyTuple_SET_ITEM(string_arg, 0, string); - Py_INCREF(string); - - if (freq_group == FR_HR || - freq_group == FR_MIN || - freq_group == FR_SEC) - { datetime = PyEval_CallObject(DateTimeFromString, string_arg); } - else { datetime = PyEval_CallObject(DateFromString, string_arg); } - - Py_DECREF(string_arg); - - value = NULL; - } - - if (value) { - self->value = PyInt_AsLong(value); - } else { - - int freq_group = get_freq_group(self->freq); - - if (datetime) { - year=PyDateTime_GET_YEAR(datetime); - month=PyDateTime_GET_MONTH(datetime); - day=PyDateTime_GET_DAY(datetime); - hour=PyDateTime_DATE_GET_HOUR(datetime); - minute=PyDateTime_DATE_GET_MINUTE(datetime); - second=PyDateTime_DATE_GET_SECOND(datetime); - } - - if (!datetime) { - - // First, some basic checks..... 
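// Which keyword arguments are required depends on the frequency: year is
// always needed; daily, business, weekly and undefined dates also need
// month and day; monthly needs month; quarterly needs quarter; and the
// hourly, minutely and secondly frequencies additionally need hour,
// minute and second respectively.  A minute or second given without an
// hour is treated as an offset within the day and split up below.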
- if (year == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - if (self->freq == FR_BUS || - self->freq == FR_DAY || - self->freq == FR_WK || - self->freq == FR_UND) { - if (month == def_info || day == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - - // if FR_BUS, check for week day - - } else if (self->freq == FR_MTH) { - if (month == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - } else if (freq_group == FR_QTR) { - if (quarter == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - } else if (self->freq == FR_SEC) { - if (month == def_info || - day == def_info || - second == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - if (hour == def_info) { - hour = second/3600; - minute = (second % 3600)/60; - second = second % 60; - } else if (minute == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - } else if (self->freq == FR_MIN) { - if (month == def_info || - day == def_info || - minute == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - if (hour == def_info) { - hour = minute/60; - minute = minute % 60; - } - } else if (self->freq == FR_HR) { - if (month == def_info || - day == def_info || - hour == def_info) { - INIT_ERR(PyExc_ValueError, INSUFFICIENT_MSG); - } - } - - } - - if (self->freq == FR_SEC) { - long absdays, delta; - absdays = absdate_from_ymd(year, month, day); - delta = (absdays - HIGHFREQ_ORIG); - self->value = (int)(delta*86400 + hour*3600 + minute*60 + second + 1); - } else if (self->freq == FR_MIN) { - long absdays, delta; - absdays = absdate_from_ymd(year, month, day); - delta = (absdays - HIGHFREQ_ORIG); - self->value = (int)(delta*1440 + hour*60 + minute + 1); - } else if (self->freq == FR_HR) { - long absdays, delta; - if((absdays = absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; - delta = (absdays - HIGHFREQ_ORIG); - self->value = (int)(delta*24 + hour + 1); - } else if (self->freq == FR_DAY) { - if((self->value = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; - } else if (self->freq == FR_UND) { - if((self->value = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; - } else if (self->freq == FR_BUS) { - long weeks, days; - if((days = absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; - weeks = days/7; - self->value = (int)(days - weeks*2); - } else if (freq_group == FR_WK) { - int adj_ordinal, ordinal, day_adj; - if((ordinal = (int)absdate_from_ymd(year, month, day)) == INT_ERR_CODE) return -1; - day_adj = (7 - (self->freq - FR_WK)) % 7; - adj_ordinal = ordinal + ((7 - day_adj) - ordinal % 7) % 7; - self->value = adj_ordinal/7; - } else if (self->freq == FR_MTH) { - self->value = (year-1)*12 + month; - } else if (freq_group == FR_QTR) { - if ((self->freq - freq_group) > 12) { - // quarterly frequency with year determined by ending period - self->value = year*4 + quarter; - } else { - /* quarterly frequency with year determined by ending period - or has December year end*/ - self->value = (year-1)*4 + quarter; - } - } else if (freq_group == FR_ANN) { - self->value = year; - } - - } - - if (free_dt) { Py_DECREF(datetime); } - - return 0; -} - -static PyMemberDef DateObject_members[] = { - {"freq", T_INT, offsetof(DateObject, freq), 0, - "frequency"}, - {"value", T_INT, offsetof(DateObject, value), 0, - "integer representation of the Date"}, - {NULL} /* Sentinel */ -}; - -static char DateObject_toordinal_doc[] = -"Return the proleptic Gregorian ordinal of the date, where January 1 
of\n" -"year 1 has ordinal 1"; -static PyObject * -DateObject_toordinal(DateObject* self) -{ - if (self->freq == FR_DAY) { - return PyInt_FromLong(self->value); - } else { - long (*toDaily)(long, char, struct asfreq_info*) = NULL; - struct asfreq_info af_info; - - toDaily = get_asfreq_func(self->freq, FR_DAY, 0); - get_asfreq_info(self->freq, FR_DAY, &af_info); - - return PyInt_FromLong(toDaily(self->value, 'A', &af_info)); - } -} - -static char DateObject_asfreq_doc[] = -"Returns a date converted to a specified frequency.\n\n" -":Parameters:\n" -" - freq : string/int\n" -" Frequency to convert the Date to. Accepts any valid frequency\n" -" specification (string or integer)\n" -" - relation :string *['After']*\n" -" Applies only when converting a lower frequency Date to a higher\n" -" frequency Date, or when converting a weekend Date to a business\n" -" frequency Date. Valid values are 'before', 'after', 'b', and 'a'."; -static PyObject * -DateObject_asfreq(DateObject *self, PyObject *args, PyObject *kwds) -{ - - PyObject *freq=NULL; - char *relation_raw=NULL; - char *relation_uc; - char relation; - int invalid_relation=0; - int toFreq; - int result_val; - DateObject *result = DateObject_New(); - - static char *kwlist[] = {"freq", "relation", NULL}; - - long (*asfreq_func)(long, char, struct asfreq_info*) = NULL; - struct asfreq_info af_info; - - if (! PyArg_ParseTupleAndKeywords(args, kwds, "O|s", kwlist, - &freq, &relation_raw)) return NULL; - - if(relation_raw) { - if (strlen(relation_raw) > 0) { - if((relation_uc = str_uppercase(relation_raw)) == NULL) - {return PyErr_NoMemory();} - - if (strcmp(relation_uc, "BEFORE") == 0 || - strcmp(relation_uc, "B") == 0 || - strcmp(relation_uc, "AFTER") == 0 || - strcmp(relation_uc, "A") == 0) { - relation = relation_uc[0]; - } else { invalid_relation=1; } - } else { - invalid_relation=1; - } - - if (invalid_relation) { - PyErr_SetString(PyExc_ValueError,"Invalid relation specification"); - return NULL; - } - } else { - relation = 'A'; - } - - if ((toFreq = check_freq(freq)) == INT_ERR_CODE) return NULL; - - get_asfreq_info(self->freq, toFreq, &af_info); - asfreq_func = get_asfreq_func(self->freq, toFreq, 0); - - result_val = asfreq_func(self->value, relation, &af_info); - - result->freq = toFreq; - result->value = result_val; - - return (PyObject*)result; - -} - -static char DateObject_strfmt_doc[] = -"Returns string representation of Date object according to format specified.\n\n" -":Parameters:\n" -" - fmt : string\n" -" Formatting string. Uses the same directives as in the time.strftime\n" -" function in the standard Python time module. In addition, a few other\n" -" directives are supported:\n" -" %q - the 'quarter' of the date\n" -" %f - Year without century as a decimal number [00,99]. The\n" -" 'year' in this case is the year of the date determined by\n" -" the year for the current quarter. This is the same as %y\n" -" unless the Date is one of the 'qtr-s' frequencies\n" -" %F - Year with century as a decimal number. The 'year' in this\n" -" case is the year of the date determined by the year for\n" -" the current quarter. 
This is the same as %Y unless the\n" -" Date is one of the 'qtr-s' frequencies\n"; -static PyObject * -DateObject_strfmt(DateObject *self, PyObject *args) -{ - - char *orig_fmt_str, *fmt_str; - char *result; - - int num_extra_fmts = 3; - - char extra_fmts[3][2][10] = {{"%q", "^`AB`^"}, - {"%f", "^`CD`^"}, - {"%F", "^`EF`^"}}; - - int extra_fmts_found[3] = {0,0,0}; - int extra_fmts_found_one = 0; - struct tm c_date; - struct date_info tempDate; - long absdate; - double abstime; - int i, result_len; - PyObject *py_result; - - long (*toDaily)(long, char, struct asfreq_info*) = NULL; - struct asfreq_info af_info; - - if (!PyArg_ParseTuple(args, "s:strfmt(fmt)", &orig_fmt_str)) return NULL; - - toDaily = get_asfreq_func(self->freq, FR_DAY, 0); - get_asfreq_info(self->freq, FR_DAY, &af_info); - - absdate = toDaily(self->value, 'A', &af_info); - abstime = getAbsTime(self->freq, absdate, self->value); - - if(dInfoCalc_SetFromAbsDateTime(&tempDate, absdate, abstime, - GREGORIAN_CALENDAR)) return NULL; - - // populate standard C date struct with info from our date_info struct - c_date.tm_sec = (int)tempDate.second; - c_date.tm_min = tempDate.minute; - c_date.tm_hour = tempDate.hour; - c_date.tm_mday = tempDate.day; - c_date.tm_mon = tempDate.month - 1; - c_date.tm_year = tempDate.year - 1900; - c_date.tm_wday = tempDate.day_of_week; - c_date.tm_yday = tempDate.day_of_year; - c_date.tm_isdst = -1; - - result_len = strlen(orig_fmt_str) + 50; - if ((result = malloc(result_len * sizeof(char))) == NULL) {return PyErr_NoMemory();} - - fmt_str = orig_fmt_str; - - // replace any special format characters with their place holder - for(i=0; i < num_extra_fmts; i++) { - char *special_loc; - if ((special_loc = strstr(fmt_str,extra_fmts[i][0])) != NULL) { - char *tmp_str = fmt_str; - fmt_str = str_replace(fmt_str, extra_fmts[i][0], - extra_fmts[i][1]); - /* only free the previous loop value if this is not the first - special format string found */ - if (extra_fmts_found_one) { free(tmp_str); } - - if (fmt_str == NULL) {return NULL;} - - extra_fmts_found[i] = 1; - extra_fmts_found_one = 1; - } - } - - strftime(result, result_len, fmt_str, &c_date); - if (extra_fmts_found_one) { free(fmt_str); } - - // replace any place holders with the appropriate value - for(i=0; i < num_extra_fmts; i++) { - if (extra_fmts_found[i]) { - char *tmp_str = result; - char *extra_str; - - if (strcmp(extra_fmts[i][0], "%q") == 0 || - strcmp(extra_fmts[i][0], "%f") == 0 || - strcmp(extra_fmts[i][0], "%F") == 0) { - - struct asfreq_info af_info; - int qtr_freq, year, quarter, year_len; - - if (get_freq_group(self->freq) == FR_QTR) { - qtr_freq = self->freq; - } else { qtr_freq = FR_QTR; } - get_asfreq_info(FR_DAY, qtr_freq, &af_info); - - if(DtoQ_yq(absdate, &af_info, &year, &quarter) == INT_ERR_CODE) - { return NULL; } - - if(strcmp(extra_fmts[i][0], "%q") == 0) { - if ((extra_str = malloc(2 * sizeof(char))) == NULL) { - free(tmp_str); - return PyErr_NoMemory(); - } - sprintf(extra_str, "%i", quarter); - } else { - if ((qtr_freq % 1000) > 12) { year -= 1; } - - if (strcmp(extra_fmts[i][0], "%f") == 0) { - year_len = 2; - year = year % 100; - } else { year_len = 4; } - - if ((extra_str = malloc((year_len+1) * sizeof(char))) == NULL) { - free(tmp_str); - return PyErr_NoMemory(); - } - - if (year_len == 2 && year < 10) { - sprintf(extra_str, "0%i", year); - } else { sprintf(extra_str, "%i", year); } - } - - } else { - PyErr_SetString(PyExc_RuntimeError,"Unrecogized fmt string"); - return NULL; - } - - result = str_replace(result, 
extra_fmts[i][1], extra_str); - free(tmp_str); - free(extra_str); - if (result == NULL) { return NULL; } - } - } - - py_result = PyString_FromString(result); - free(result); - - return py_result; -} - -static PyObject * -DateObject___str__(DateObject* self) -{ - - int freq_group = get_freq_group(self->freq); - PyObject *string_arg, *retval; - - string_arg = NULL; - if (freq_group == FR_ANN) { string_arg = Py_BuildValue("(s)", "%Y"); } - else if (freq_group == FR_QTR) { string_arg = Py_BuildValue("(s)", "%FQ%q"); } - else if (freq_group == FR_MTH) { string_arg = Py_BuildValue("(s)", "%b-%Y"); } - else if (freq_group == FR_DAY || - freq_group == FR_BUS || - freq_group == FR_WK || - freq_group == FR_UND) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y"); } - else if (freq_group == FR_HR) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:00"); } - else if (freq_group == FR_MIN) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:%M"); } - else if (freq_group == FR_SEC) { string_arg = Py_BuildValue("(s)", "%d-%b-%Y %H:%M:%S"); } - - if (string_arg == NULL) { return NULL; } - - retval = DateObject_strfmt(self, string_arg); - Py_DECREF(string_arg); - - return retval; -} - -static PyObject * -DateObject_freqstr(DateObject *self, void *closure) { - PyObject *key = PyInt_FromLong(self->freq); - PyObject *freq_aliases = PyDict_GetItem(freq_dict, key); - PyObject *main_alias = PyTuple_GET_ITEM(freq_aliases, 0); - Py_DECREF(key); - Py_INCREF(main_alias); - return main_alias; -} - - -static PyObject * -DateObject___repr__(DateObject* self) -{ - PyObject *py_str_rep, *py_freqstr, *py_repr; - char *str_rep, *freqstr, *repr; - int repr_len; - - py_str_rep = DateObject___str__(self); - if (py_str_rep == NULL) { return NULL; } - - py_freqstr = DateObject_freqstr(self, NULL); - - str_rep = PyString_AsString(py_str_rep); - freqstr = PyString_AsString(py_freqstr); - - repr_len = strlen(str_rep) + strlen(freqstr) + 6; - - if((repr = malloc((repr_len + 1) * sizeof(char))) == NULL) - { return PyErr_NoMemory(); } - - strcpy(repr, "<"); - strcat(repr, freqstr); - strcat(repr, " : "); - strcat(repr, str_rep); - strcat(repr, ">"); - - py_repr = PyString_FromString(repr); - - Py_DECREF(py_str_rep); - Py_DECREF(py_freqstr); - - free(repr); - - return py_repr; -} - -/****************************** - These methods seem rather useless. May or may not implement them. 
-fromordinal(self, ordinal): - return Date(self.freq, datetime=dt.datetime.fromordinal(ordinal)) -tostring(self): - return str(self) -toobject(self): - return self -isvalid(self): - return True -*******************************/ - - -static DateObject * -DateObject_FromFreqAndValue(int freq, int value) { - - DateObject *result = DateObject_New(); - - PyObject *args = PyTuple_New(0); - PyObject *kw = PyDict_New(); - PyObject *py_freq = PyInt_FromLong(freq); - PyObject *py_value = PyInt_FromLong(value); - - PyDict_SetItemString(kw, "freq", py_freq); - PyDict_SetItemString(kw, "value", py_value); - - Py_DECREF(py_freq); - Py_DECREF(py_value); - - DateObject_init(result, args, kw); - - Py_DECREF(args); - Py_DECREF(kw); - - return result; -} - -static PyObject * -DateObject_date_plus_int(PyObject *date, PyObject *pyint) { - DateObject *dateobj = (DateObject*)date; - if (DateObject_Check(pyint)) { - PyErr_SetString(PyExc_TypeError, "Cannot add two Date objects"); - return NULL; - } - - return (PyObject*)DateObject_FromFreqAndValue(dateobj->freq, PyInt_AsLong(pyint) + dateobj->value); -} - -static PyObject * -DateObject___add__(PyObject *left, PyObject *right) -{ - if (DateObject_Check(left)) { - return DateObject_date_plus_int(left, right); - } else { - return DateObject_date_plus_int(right, left); - } -} - -static PyObject * -DateObject___subtract__(PyObject *left, PyObject *right) -{ - int result; - DateObject *dleft; - if (!DateObject_Check(left)) { - PyErr_SetString(PyExc_ValueError, "Cannot subtract Date from non-Date value"); - return NULL; - } - - dleft = (DateObject*)left; - - if (DateObject_Check(right)) { - DateObject *dright = (DateObject*)right; - if (dleft->freq != dright->freq) { - PyErr_SetString(PyExc_ValueError, "Cannot subtract Dates with different frequency"); - return NULL; - } - result = dleft->value - dright->value; - return PyInt_FromLong(result); - } else { - result = dleft->value - PyInt_AsLong(right); - return (PyObject*)DateObject_FromFreqAndValue(dleft->freq, result); - } -} - -static int -DateObject___compare__(DateObject * obj1, DateObject * obj2) -{ - if (obj1->freq != obj2->freq) { - PyErr_SetString(PyExc_ValueError, - "Cannot compare dates with different frequency"); - return -1; - } - - if (obj1->value < obj2->value) return -1; - if (obj1->value > obj2->value) return 1; - if (obj1->value == obj2->value) return 0; - return -1; -} - -static long -DateObject___hash__(DateObject *self) -{ - register int freq_group = get_freq_group(self->freq); - - /* within a given frequency, hash values are guaranteed to be unique - for different dates. 
For different frequencies, we make a reasonable - effort to ensure hash values will be unique, but it is not guaranteed */ - if (freq_group == FR_BUS) { - return self->value + 10000000; - } else if (freq_group == FR_WK) { - return self->value + 100000000; - } else { return self->value; } -} - -static PyObject * -DateObject___int__(DateObject *self) -{ - return PyInt_FromLong(self->value); -} - -static PyObject * -DateObject___float__(DateObject *self) -{ - return PyFloat_FromDouble((double)(self->value)); -} - -/*************************************************** - ====== Date Properties ====== -****************************************************/ - -// helper function for date property funcs -static int -DateObject_set_date_info(DateObject *self, struct date_info *dinfo) { - PyObject *daily_obj = DateObject_toordinal(self); - long absdate = PyInt_AsLong(daily_obj); - - Py_DECREF(daily_obj); - - if(dInfoCalc_SetFromAbsDate(dinfo, absdate, - GREGORIAN_CALENDAR)) return -1; - - return 0; -} - -// helper function for date property funcs -static int -DateObject_set_date_info_wtime(DateObject *self, struct date_info *dinfo) { - PyObject *daily_obj = DateObject_toordinal(self); - long absdate = PyInt_AsLong(daily_obj); - double abstime; - - Py_DECREF(daily_obj); - - abstime = getAbsTime(self->freq, absdate, self->value); - - if(dInfoCalc_SetFromAbsDateTime(dinfo, absdate, abstime, - GREGORIAN_CALENDAR)) return -1; - - return 0; -} - -static PyObject * -DateObject_year(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.year); -} - -static int _DateObject_quarter_year(DateObject *self, int *year, int *quarter) { - - PyObject *daily_obj; - long absdate; - - struct asfreq_info af_info; - int qtr_freq; - - daily_obj = DateObject_toordinal(self); - absdate = PyInt_AsLong(daily_obj); - Py_DECREF(daily_obj); - - if (get_freq_group(self->freq) == FR_QTR) { - qtr_freq = self->freq; - } else { qtr_freq = FR_QTR; } - get_asfreq_info(FR_DAY, qtr_freq, &af_info); - - if(DtoQ_yq(absdate, &af_info, year, quarter) == INT_ERR_CODE) - { return INT_ERR_CODE; } - - if ((qtr_freq % 1000) > 12) { *year -= 1; } - - return 0; -} - -static PyObject * -DateObject_qyear(DateObject *self, void *closure) { - int year, quarter; - if(_DateObject_quarter_year(self, - &year, &quarter) == INT_ERR_CODE) { return NULL; } - return PyInt_FromLong(year); -} - -static PyObject * -DateObject_quarter(DateObject *self, void *closure) { - int year, quarter; - if(_DateObject_quarter_year(self, - &year, &quarter) == INT_ERR_CODE) { return NULL; } - return PyInt_FromLong(quarter); -} - -static PyObject * -DateObject_month(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.month); -} - -static PyObject * -DateObject_day(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.day); -} - -static PyObject * -DateObject_day_of_week(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.day_of_week); -} - -static PyObject * -DateObject_day_of_year(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.day_of_year); -} - -static PyObject * 
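/* Each calendar property above follows the same pattern: convert the
   Date to its daily ordinal with DateObject_toordinal, fill a
   date_info struct via dInfoCalc_SetFromAbsDate (or the _wtime
   variant when the time of day matters), and return the requested
   field.  The week property below additionally passes the result
   through dInfoCalc_ISOWeek to obtain the ISO week number. */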
-DateObject_week(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dInfoCalc_ISOWeek(&dinfo)); -} - -static PyObject * -DateObject_hour(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.hour); -} - -static PyObject * -DateObject_minute(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; - return PyInt_FromLong(dinfo.minute); -} - -static PyObject * -DateObject_second(DateObject *self, void *closure) { - struct date_info dinfo; - if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; - return PyInt_FromLong((int)dinfo.second); -} - -static PyObject * -DateObject_datetime(DateObject *self, void *closure) { - PyObject *datetime; - struct date_info dinfo; - if(DateObject_set_date_info_wtime(self, &dinfo) == -1) return NULL; - datetime = PyDateTime_FromDateAndTime(dinfo.year, dinfo.month, - dinfo.day, dinfo.hour, - dinfo.minute, (int)dinfo.second, 0); - return datetime; -} - -static int -DateObject_ReadOnlyErr(DateObject *self, PyObject *value, void *closure) { - PyErr_SetString(PyExc_AttributeError, "Cannot set read-only property"); - return -1; -} - -static PyGetSetDef DateObject_getseters[] = { - {"year", (getter)DateObject_year, (setter)DateObject_ReadOnlyErr, - "Returns the year.", NULL}, - {"qyear", (getter)DateObject_qyear, (setter)DateObject_ReadOnlyErr, - "For quarterly frequency dates, returns the year corresponding to the\n" - "year end (start) month. When using QTR or QTR-E based quarterly\n" - "frequencies, this is the fiscal year in a financial context.\n\n" - "For non-quarterly dates, this simply returns the year of the date.", - NULL}, - {"quarter", (getter)DateObject_quarter, (setter)DateObject_ReadOnlyErr, - "Returns the quarter.", NULL}, - {"month", (getter)DateObject_month, (setter)DateObject_ReadOnlyErr, - "Returns the month.", NULL}, - {"week", (getter)DateObject_week, (setter)DateObject_ReadOnlyErr, - "Returns the week.", NULL}, - {"day", (getter)DateObject_day, (setter)DateObject_ReadOnlyErr, - "Returns the day of month.", NULL}, - {"day_of_week", (getter)DateObject_day_of_week, (setter)DateObject_ReadOnlyErr, - "Returns the day of week.", NULL}, - {"day_of_year", (getter)DateObject_day_of_year, (setter)DateObject_ReadOnlyErr, - "Returns the day of year.", NULL}, - {"second", (getter)DateObject_second, (setter)DateObject_ReadOnlyErr, - "Returns the second.", NULL}, - {"minute", (getter)DateObject_minute, (setter)DateObject_ReadOnlyErr, - "Returns the minute.", NULL}, - {"hour", (getter)DateObject_hour, (setter)DateObject_ReadOnlyErr, - "Returns the hour.", NULL}, - - {"freqstr", (getter)DateObject_freqstr, (setter)DateObject_ReadOnlyErr, - "Returns the string representation of frequency.", NULL}, - {"datetime", (getter)DateObject_datetime, (setter)DateObject_ReadOnlyErr, - "Returns the Date object converted to standard python datetime object", - NULL}, - - {NULL} /* Sentinel */ -}; - - -static PyNumberMethods DateObject_as_number = { - (binaryfunc)DateObject___add__, /* nb_add */ - (binaryfunc)DateObject___subtract__, /* nb_subtract */ - 0, /* nb_multiply */ - 0, /* nb_divide */ - 0, /* nb_remainder */ - 0, /* nb_divmod */ - 0, /* nb_power */ - 0, /* nb_negative */ - 0, /* nb_positive */ - 0, /* nb_absolute */ - 0, /* nb_nonzero */ - 0, /* nb_invert */ - 0, /* nb_lshift */ - 
0, /* nb_rshift */ - 0, /* nb_and */ - 0, /* nb_xor */ - 0, /* nb_or */ - 0, /* nb_coerce */ - (unaryfunc)DateObject___int__, /* nb_int */ - (unaryfunc)0, /* nb_long */ - (unaryfunc)DateObject___float__, /* nb_float */ - (unaryfunc)0, /* nb_oct */ - (unaryfunc)0, /* nb_hex */ -}; - -static PyMethodDef DateObject_methods[] = { - {"toordinal", (PyCFunction)DateObject_toordinal, METH_NOARGS, - DateObject_toordinal_doc}, - {"strfmt", (PyCFunction)DateObject_strfmt, METH_VARARGS, - DateObject_strfmt_doc}, - {"asfreq", (PyCFunction)DateObject_asfreq, METH_VARARGS | METH_KEYWORDS, - DateObject_asfreq_doc}, - {NULL} /* Sentinel */ -}; - - -static PyTypeObject DateType = { - PyObject_HEAD_INIT(NULL) - 0, /* ob_size */ - "timeseries.Date", /* tp_name */ - sizeof(DateObject), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)DateObject_dealloc, /* tp_dealloc */ - 0, /* tp_print */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - (cmpfunc)DateObject___compare__, /* tp_compare */ - (reprfunc)DateObject___repr__, /* tp_repr */ - &DateObject_as_number, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - (hashfunc)DateObject___hash__, /* tp_hash */ - 0, /* tp_call*/ - (reprfunc)DateObject___str__, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT | /* tp_flags */ - Py_TPFLAGS_CHECKTYPES | - Py_TPFLAGS_BASETYPE, - "Defines a Date object, as the combination of a date and a frequency.\n" - "Several options are available to construct a Date object explicitly:\n\n" - "- Give appropriate values to the `year`, `month`, `day`, `quarter`, `hours`,\n" - " `minutes`, `seconds` arguments.\n\n" - " >>> td.Date(freq='Q',year=2004,quarter=3)\n" - " >>> td.Date(freq='D',year=2001,month=1,day=1)\n\n" - "- Use the `string` keyword. This method uses a modified version of the\n" - " mx.DateTime parser submodule. 
More information is available in its\n" - " documentation.\n\n" - " >>> ts.Date('D', '2007-01-01')\n\n" - "- Use the `datetime` keyword with an existing datetime.datetime object.\n\n" - " >>> td.Date('D', datetime=datetime.datetime.now())", /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - DateObject_methods, /* tp_methods */ - DateObject_members, /* tp_members */ - DateObject_getseters, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)DateObject_init, /* tp_init */ - 0, /* tp_alloc */ - DateObject_new, /* tp_new */ -}; - - -/////////////////////////////////////////////////////////////////////// - -static char cseries_check_freq_doc[] = -"translate user specified frequency into frequency constant"; -static PyObject * -cseries_check_freq(PyObject *self, PyObject *args) { - - PyObject *freq; - int freq_val; - - if (!PyArg_ParseTuple(args, "O:check_freq(freq)", &freq)) return NULL; - if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; - - return PyInt_FromLong(freq_val); -} - -static char cseries_check_freq_str_doc[] = -"translate user specified frequency into standard string representation"; -static PyObject * -cseries_check_freq_str(PyObject *self, PyObject *args) { - - PyObject *alias_tuple, *result, *freq_key; - - if ((freq_key = cseries_check_freq(self, args)) == NULL) return NULL; - - alias_tuple = PyDict_GetItem(freq_dict, freq_key); - result = PyTuple_GET_ITEM(alias_tuple, 0); - - Py_INCREF(result); - - Py_DECREF(freq_key); - - return result; -} - -static char cseries_get_freq_group_doc[] = -"translate user specified frequency into frequency group constant"; -static PyObject * -cseries_get_freq_group(PyObject *self, PyObject *args) { - - PyObject *freq; - int freq_val; - - if (!PyArg_ParseTuple(args, "O:get_freq_group(freq)", &freq)) return NULL; - if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; - - return PyInt_FromLong(get_freq_group(freq_val)); -} - -static char cseries_thisday_doc[] = -"Returns today's date, at the given frequency\n\n" -":Parameters:\n" -" - freq : string/int\n" -" Frequency to convert the Date to. 
Accepts any valid frequency\n" -" specification (string or integer)\n"; -static PyObject * -cseries_thisday(PyObject *self, PyObject *args) { - - PyObject *freq, *init_args, *init_kwargs; - time_t rawtime; - struct tm *timeinfo; - int freq_val; - - DateObject *secondly_date; - - if (!PyArg_ParseTuple(args, "O:thisday(freq)", &freq)) return NULL; - - if ((freq_val = check_freq(freq)) == INT_ERR_CODE) return NULL; - - time(&rawtime); - timeinfo = localtime(&rawtime); - - init_args = PyTuple_New(0); - init_kwargs = PyDict_New(); - - DICT_SETINT_STRKEY(init_kwargs, "freq", FR_SEC); - DICT_SETINT_STRKEY(init_kwargs, "year", timeinfo->tm_year+1900); - DICT_SETINT_STRKEY(init_kwargs, "month", timeinfo->tm_mon+1); - DICT_SETINT_STRKEY(init_kwargs, "day", timeinfo->tm_mday); - DICT_SETINT_STRKEY(init_kwargs, "hour", timeinfo->tm_hour); - DICT_SETINT_STRKEY(init_kwargs, "minute", timeinfo->tm_min); - DICT_SETINT_STRKEY(init_kwargs, "second", timeinfo->tm_sec); - - secondly_date = DateObject_New(); - DateObject_init(secondly_date, init_args, init_kwargs); - - Py_DECREF(init_args); - Py_DECREF(init_kwargs); - - if (freq_val != FR_SEC) { - DateObject *result = DateObject_New(); - - long (*asfreq_func)(long, char, struct asfreq_info*) = NULL; - struct asfreq_info af_info; - - int date_val; - - get_asfreq_info(FR_SEC, freq_val, &af_info); - asfreq_func = get_asfreq_func(FR_SEC, freq_val, 0); - - date_val = asfreq_func(secondly_date->value, 'A', &af_info); - - Py_DECREF(secondly_date); - - result->freq = freq_val; - result->value = date_val; - - return (PyObject*)result; - - } else { return (PyObject*)secondly_date; } -} - - -static char TimeSeries_convert_doc[] = ""; -static PyObject * -TimeSeries_convert(PyObject *self, PyObject *args) -{ - PyObject *arrayTest; - PyArrayObject *array, *newArray; - PyArrayObject *mask, *newMask; - - PyObject *returnVal = NULL; - PyObject *start_index_retval; - - long startIndex; - long newStart, newStartTemp; - long newEnd, newEndTemp; - long newLen, newHeight; - int i; - long currIndex, prevIndex; - long nd; - npy_intp *dim, *newIdx; - long currPerLen; - char *position; - PyObject *fromFreq_arg, *toFreq_arg; - int fromFreq, toFreq; - char relation; - struct asfreq_info af_info; - - PyObject *val, *valMask; - - long (*asfreq_main)(long, char, struct asfreq_info*) = NULL; - long (*asfreq_endpoints)(long, char, struct asfreq_info*) = NULL; - long (*asfreq_reverse)(long, char, struct asfreq_info*) = NULL; - - returnVal = PyDict_New(); - - if (!PyArg_ParseTuple(args, - "OOOslO:convert(array, fromfreq, tofreq, position, startIndex, mask)", - &array, &fromFreq_arg, &toFreq_arg, - &position, &startIndex, &mask)) return NULL; - - if((fromFreq = check_freq(fromFreq_arg)) == INT_ERR_CODE) return NULL; - if((toFreq = check_freq(toFreq_arg)) == INT_ERR_CODE) return NULL; - - if (toFreq == fromFreq) - { - PyObject *sidx; - newArray = (PyArrayObject *)PyArray_Copy(array); - newMask = (PyArrayObject *)PyArray_Copy(mask); - sidx = PyInt_FromLong(startIndex); - - PyDict_SetItemString(returnVal, "values", (PyObject*)newArray); - PyDict_SetItemString(returnVal, "mask", (PyObject*)newMask); - PyDict_SetItemString(returnVal, "startindex", sidx); - - Py_DECREF(newArray); - Py_DECREF(newMask); - Py_DECREF(sidx); - - return returnVal; - } - - switch(position[0]) - { - case 'S': - // start -> before - relation = 'B'; - break; - case 'E': - // end -> after - relation = 'A'; - break; - default: - return NULL; - break; - } - - get_asfreq_info(fromFreq, toFreq, &af_info); - - asfreq_main = 
get_asfreq_func(fromFreq, toFreq, 1); - asfreq_endpoints = get_asfreq_func(fromFreq, toFreq, 0); - - //convert start index to new frequency - CHECK_ASFREQ(newStartTemp = asfreq_main(startIndex, 'B', &af_info)); - if (newStartTemp < 1) { - CHECK_ASFREQ(newStart = asfreq_endpoints(startIndex, 'A', &af_info)); - } - else { newStart = newStartTemp; } - - //convert end index to new frequency - CHECK_ASFREQ(newEndTemp = asfreq_main(startIndex+array->dimensions[0]-1, 'A', &af_info)); - if (newEndTemp < 1) { - CHECK_ASFREQ(newEnd = asfreq_endpoints(startIndex+array->dimensions[0]-1, 'B', &af_info)); - } - else { newEnd = newEndTemp; } - - if (newStart < 1) { - PyErr_SetString(PyExc_ValueError, "start_date outside allowable range for destination frequency"); - return NULL; - } - - newLen = newEnd - newStart + 1; - newHeight = get_height(fromFreq, toFreq); - - if (newHeight > 1) { - long tempval; - struct asfreq_info af_info_rev; - - get_asfreq_info(toFreq, fromFreq, &af_info_rev); - asfreq_reverse = get_asfreq_func(toFreq, fromFreq, 0); - - CHECK_ASFREQ(tempval = asfreq_reverse(newStart, 'B', &af_info_rev)); - currPerLen = startIndex - tempval; - - nd = 2; - dim = PyDimMem_NEW(nd); - dim[0] = (npy_intp)newLen; - dim[1] = (npy_intp)newHeight; - } else { - nd = 1; - dim = PyDimMem_NEW(nd); - dim[0] = (npy_intp)newLen; - } - - newIdx = PyDimMem_NEW(nd); - arrayTest = PyArray_SimpleNew(nd, dim, array->descr->type_num); - if (arrayTest == NULL) { return NULL; } - newArray = (PyArrayObject*)arrayTest; - newMask = (PyArrayObject*)PyArray_SimpleNew(nd, dim, mask->descr->type_num); - - PyDimMem_FREE(dim); - - PyArray_FILLWBYTE(newArray,0); - PyArray_FILLWBYTE(newMask,1); - - prevIndex = newStart; - - //set values in the new array - for (i = 0; i < array->dimensions[0]; i++) { - - val = PyArray_GETITEM(array, PyArray_GetPtr(array, &i)); - valMask = PyArray_GETITEM(mask, PyArray_GetPtr(mask, &i)); - - CHECK_ASFREQ(currIndex = asfreq_main(startIndex + i, relation, &af_info)); - - newIdx[0] = currIndex-newStart; - - if (newHeight > 1) { - - if (currIndex != prevIndex) - { - //reset period length - currPerLen = 0; - prevIndex = currIndex; - } - - newIdx[1] = currPerLen; - currPerLen++; - } - - if (newIdx[0] > -1) { - PyArray_SETITEM(newArray, PyArray_GetPtr(newArray, newIdx), val); - PyArray_SETITEM(newMask, PyArray_GetPtr(newMask, newIdx), valMask); - } - - Py_DECREF(val); - Py_DECREF(valMask); - - } - - PyDimMem_FREE(newIdx); - - start_index_retval = (PyObject*)PyInt_FromLong(newStart); - - PyDict_SetItemString(returnVal, "values", (PyObject*)newArray); - PyDict_SetItemString(returnVal, "mask", (PyObject*)newMask); - PyDict_SetItemString(returnVal, "startindex", start_index_retval); - - Py_DECREF(newArray); - Py_DECREF(newMask); - Py_DECREF(start_index_retval); - - return returnVal; -} - -static PyObject *NP_ADD, *NP_MULTIPLY, *NP_SUBTRACT, *NP_SQRT, - *NP_GREATER, *NP_GREATER_EQUAL; - -static PyObject* -np_add(PyObject *left_val, PyObject *right_val) { - - PyObject *result; - - result = PyObject_CallFunction( - NP_ADD, "OO", - (PyArrayObject*)left_val, - right_val); - return result; -} - -static PyObject* -np_subtract(PyObject *left_val, PyObject *right_val) { - - PyObject *result; - - result = PyObject_CallFunction( - NP_SUBTRACT, "OO", - (PyArrayObject*)left_val, - right_val); - return result; -} - -static PyObject* -np_multiply(PyObject *left_val, PyObject *right_val) { - - PyObject *result; - - result = PyObject_CallFunction( - NP_MULTIPLY, "OO", - (PyArrayObject*)left_val, - right_val); - return result; 
-} - -static PyObject* -np_sqrt(PyObject *val) { - return PyObject_CallFunction(NP_SQRT, "(O)", val); -} - -static int np_greater(PyObject *left_val, PyObject *right_val) { - - PyObject *temp; - int result; - - temp = PyObject_CallFunction( - NP_GREATER, "OO", - (PyArrayObject*)left_val, - right_val); - - result = (int)PyInt_AsLong(temp); - Py_DECREF(temp); - return result; -} - -static int np_greater_equal(PyObject *left_val, PyObject *right_val) { - - PyObject *temp; - int result; - - temp = PyObject_CallFunction( - NP_GREATER_EQUAL, "OO", - (PyArrayObject*)left_val, - right_val); - - result = (int)PyInt_AsLong(temp); - Py_DECREF(temp); - return result; -} - - -/* This function is directly copied from direct copy of function in */ -/* Return typenumber from dtype2 unless it is NULL, then return - NPY_DOUBLE if dtype1->type_num is integer or bool - and dtype1->type_num otherwise. -*/ -static int -_get_type_num_double(PyArray_Descr *dtype1, PyArray_Descr *dtype2) -{ - if (dtype2 != NULL) - return dtype2->type_num; - - /* For integer or bool data-types */ - if (dtype1->type_num < NPY_FLOAT) { - return NPY_DOUBLE; - } - else { - return dtype1->type_num; - } -} - -#define _CHKTYPENUM(typ) ((typ) ? (typ)->type_num : PyArray_NOTYPE) - -/* validates the standard arguments to moving functions and set the original - mask, original ndarray, and mask for the result */ -static PyObject * -check_mov_args(PyObject *orig_arrayobj, int span, int min_win_size, - PyObject **orig_ndarray, PyObject **result_mask) { - - PyObject *orig_mask=NULL; - PyArrayObject **orig_ndarray_tmp, **result_mask_tmp; - int *raw_result_mask; - - if (!PyArray_Check(orig_arrayobj)) { - PyErr_SetString(PyExc_ValueError, "array must be a valid subtype of ndarray"); - return NULL; - } - - // check if array has a mask, and if that mask is an array - if (PyObject_HasAttrString(orig_arrayobj, "_mask")) { - PyObject *tempMask = PyObject_GetAttrString(orig_arrayobj, "_mask"); - if (PyArray_Check(tempMask)) { - orig_mask = PyArray_EnsureArray(tempMask); - } else { - Py_DECREF(tempMask); - } - } - - *orig_ndarray = PyArray_EnsureArray(orig_arrayobj); - orig_ndarray_tmp = (PyArrayObject**)orig_ndarray; - - if ((*orig_ndarray_tmp)->nd != 1) { - PyErr_SetString(PyExc_ValueError, "array must be 1 dimensional"); - return NULL; - } - - if (span < min_win_size) { - char *error_str; - error_str = malloc(60 * sizeof(char)); - MEM_CHECK(error_str) - sprintf(error_str, - "span must be greater than or equal to %i", - min_win_size); - PyErr_SetString(PyExc_ValueError, error_str); - free(error_str); - return NULL; - } - - raw_result_mask = malloc((*orig_ndarray_tmp)->dimensions[0] * sizeof(int)); - MEM_CHECK(raw_result_mask) - - { - PyArrayObject *orig_mask_tmp; - int i, valid_points=0, is_masked; - - orig_mask_tmp = (PyArrayObject*)orig_mask; - - for (i=0; i<((*orig_ndarray_tmp)->dimensions[0]); i++) { - - is_masked=0; - - if (orig_mask != NULL) { - PyObject *valMask; - valMask = PyArray_GETITEM(orig_mask_tmp, - PyArray_GetPtr(orig_mask_tmp, &i)); - is_masked = (int)PyInt_AsLong(valMask); - Py_DECREF(valMask); - } - - if (is_masked) { - valid_points=0; - } else { - if (valid_points < span) { valid_points += 1; } - if (valid_points < span) { is_masked = 1; } - } - - raw_result_mask[i] = is_masked; - } - } - - *result_mask = PyArray_SimpleNewFromData( - 1, (*orig_ndarray_tmp)->dimensions, - PyArray_INT32, raw_result_mask); - MEM_CHECK(*result_mask) - result_mask_tmp = (PyArrayObject**)result_mask; - (*result_mask_tmp)->flags = ((*result_mask_tmp)->flags) 
| NPY_OWNDATA; - return 0; -} - -/* computation portion of moving sum. Appropriate mask is overlayed on top - afterwards */ -static PyObject* -calc_mov_sum(PyArrayObject *orig_ndarray, int span, int rtype) -{ - PyArrayObject *result_ndarray=NULL; - int i; - - result_ndarray = (PyArrayObject*)PyArray_ZEROS( - orig_ndarray->nd, - orig_ndarray->dimensions, - rtype, 0); - ERR_CHECK(result_ndarray) - - for (i=0; idimensions[0]; i++) { - - PyObject *val=NULL, *mov_sum_val=NULL; - - val = PyArray_GETITEM(orig_ndarray, PyArray_GetPtr(orig_ndarray, &i)); - - if (i == 0) { - mov_sum_val = val; - } else { - int prev_idx = i-1; - PyObject *mov_sum_prevval; - mov_sum_prevval= PyArray_GETITEM(result_ndarray, - PyArray_GetPtr(result_ndarray, &prev_idx)); - mov_sum_val = np_add(val, mov_sum_prevval); - Py_DECREF(mov_sum_prevval); - ERR_CHECK(mov_sum_val) - - if (i >= span) { - PyObject *temp_val, *rem_val; - int rem_idx = i-span; - temp_val = mov_sum_val; - rem_val = PyArray_GETITEM(orig_ndarray, - PyArray_GetPtr(orig_ndarray, &rem_idx)); - - mov_sum_val = np_subtract(temp_val, rem_val); - ERR_CHECK(mov_sum_val) - - Py_DECREF(temp_val); - Py_DECREF(rem_val); - } - } - - PyArray_SETITEM(result_ndarray, - PyArray_GetPtr(result_ndarray, &i), - mov_sum_val); - - if (mov_sum_val != val) { Py_DECREF(val); } - - Py_DECREF(mov_sum_val); - } - - return (PyObject*)result_ndarray; - -} - -static char MaskedArray_mov_sum_doc[] = ""; -static PyObject * -MaskedArray_mov_sum(PyObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *orig_arrayobj=NULL, *orig_ndarray=NULL, - *result_ndarray=NULL, *result_mask=NULL, - *result_dict=NULL; - PyArray_Descr *dtype=NULL; - - int rtype, span; - - static char *kwlist[] = {"array", "span", "dtype", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, - "Oi|O&:mov_sum(array, span, dtype)", kwlist, - &orig_arrayobj, &span, - PyArray_DescrConverter2, &dtype)) return NULL; - - check_mov_args(orig_arrayobj, span, 1, - &orig_ndarray, &result_mask); - - rtype = _CHKTYPENUM(dtype); - - result_ndarray = calc_mov_sum((PyArrayObject*)orig_ndarray, - span, rtype); - ERR_CHECK(result_ndarray) - - result_dict = PyDict_New(); - MEM_CHECK(result_dict) - PyDict_SetItemString(result_dict, "array", result_ndarray); - PyDict_SetItemString(result_dict, "mask", result_mask); - - Py_DECREF(result_ndarray); - Py_DECREF(result_mask); - return result_dict; -} - -static char MaskedArray_mov_average_doc[] = ""; -static PyObject * -MaskedArray_mov_average(PyObject *self, PyObject *args, PyObject *kwds) -{ - PyObject *orig_arrayobj=NULL, *orig_ndarray=NULL, - *result_ndarray=NULL, *result_mask=NULL, - *result_dict=NULL, - *mov_sum=NULL, *denom=NULL; - PyArray_Descr *dtype=NULL; - - int rtype, span; - - static char *kwlist[] = {"array", "span", "dtype", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, - "Oi|O&:mov_average(array, span, dtype)", kwlist, - &orig_arrayobj, &span, - PyArray_DescrConverter2, &dtype)) return NULL; - - - check_mov_args(orig_arrayobj, span, 2, - &orig_ndarray, &result_mask); - - rtype = _get_type_num_double(((PyArrayObject*)orig_ndarray)->descr, dtype); - - mov_sum = calc_mov_sum((PyArrayObject*)orig_ndarray, span, rtype); - ERR_CHECK(mov_sum) - - denom = PyFloat_FromDouble(1.0/(double)(span)); - - result_ndarray = np_multiply(mov_sum, denom); - ERR_CHECK(result_ndarray) - - Py_DECREF(mov_sum); - Py_DECREF(denom); - - result_dict = PyDict_New(); - MEM_CHECK(result_dict) - PyDict_SetItemString(result_dict, "array", result_ndarray); - PyDict_SetItemString(result_dict, 
"mask", result_mask); - - Py_DECREF(result_ndarray); - Py_DECREF(result_mask); - return result_dict; -} - - -/* computation portion of moving median. Appropriate mask is overlayed on top - afterwards. - - The algorithm used here is based on the code found at: - http://cran.r-project.org/src/contrib/Devel/runStat_1.1.tar.gz - - This code was originally released under the GPL, but the author - (David Brahm) has granted me (and scipy) permission to use it under the BSD - license. */ -static PyObject* -calc_mov_median(PyArrayObject *orig_ndarray, int span, int rtype) -{ - PyArrayObject *result_ndarray=NULL; - PyObject **result_array, **ref_array, **even_array=NULL; - PyObject *new_val, *old_val; - PyObject *temp_add, *one_half; - int a, i, k, R, arr_size, z; - int *r; - - arr_size = orig_ndarray->dimensions[0]; - - result_ndarray = (PyArrayObject*)PyArray_ZEROS( - orig_ndarray->nd, - orig_ndarray->dimensions, - rtype, 0); - ERR_CHECK(result_ndarray) - - if (arr_size >= span) { - result_array = calloc(arr_size, sizeof(PyObject*)); - MEM_CHECK(result_array) - - /* this array will be used for quick access to the data in the original - array (so PyArray_GETITEM doesn't have to be used over and over in the - main loop) */ - ref_array = malloc(arr_size * sizeof(PyObject*)); - MEM_CHECK(ref_array) - - for (i=0; i= span-1; i--) { - a = span; - z = i - span + 1; - old_val = ref_array[i+1]; - new_val = ref_array[i-span+1]; - - for (k=span-1; k > 0; k--) { - r[k] = r[k-1]; /* Shift previous iteration's ranks */ - if (np_greater_equal(ref_array[z+k], new_val)) {r[k]++; a--;} - if (np_greater(ref_array[z+k], old_val)) {r[k]--;} - - if (r[k]==R) { - result_array[i] = ref_array[z+k]; - } - - if (even_array != NULL) { - if (r[k]==R) { - even_array[0] = ref_array[z+k]; - } else if (r[k] == (R+1)) { - even_array[1] = ref_array[z+k]; - } - } else { - if (r[k]==R) { - result_array[i] = ref_array[z+k]; - } - } - - } - - r[0] = a; - - if (even_array != NULL) { - if (a==R) { - even_array[0] = new_val; - } else if (a == (R+1)) { - even_array[1] = new_val; - } - - temp_add = np_add(even_array[0], even_array[1]); - result_array[i] = np_multiply(temp_add, one_half);; - Py_DECREF(temp_add); - - } else { - if (a==R) { - result_array[i] = new_val; - } - } - - } - - Py_DECREF(one_half); - - for (i=span-1; idescr, dtype); - - mov_sum = calc_mov_sum((PyArrayObject*)orig_ndarray, span, rtype); - ERR_CHECK(mov_sum) - - result_temp1 = np_multiply(orig_ndarray, orig_ndarray); - ERR_CHECK(result_temp1) - - mov_sum_sq = calc_mov_sum((PyArrayObject*)result_temp1, span, rtype); - Py_DECREF(result_temp1); - ERR_CHECK(mov_sum_sq) - - - /* - formulas from: - http://en.wikipedia.org/wiki/Standard_deviation#Rapid_calculation_methods - */ - if (bias == 0) { - denom1 = PyFloat_FromDouble(1.0/(double)(span-1)); - denom2 = PyFloat_FromDouble(1.0/(double)(span*(span-1))); - } else { - denom1 = PyFloat_FromDouble(1.0/(double)span); - denom2 = PyFloat_FromDouble(1.0/(double)(span*span)); - } - - result_temp1 = np_multiply(mov_sum_sq, denom1); - ERR_CHECK(result_temp1) - Py_DECREF(mov_sum_sq); - Py_DECREF(denom1); - - result_temp3 = np_multiply(mov_sum, mov_sum); - ERR_CHECK(result_temp3) - Py_DECREF(mov_sum); - - result_temp2 = np_multiply(result_temp3, denom2); - ERR_CHECK(result_temp2) - Py_DECREF(result_temp3); - Py_DECREF(denom2); - - result_temp3 = np_subtract(result_temp1, result_temp2); - ERR_CHECK(result_temp3) - Py_DECREF(result_temp1); - Py_DECREF(result_temp2); - - if (is_variance) { - result_ndarray = result_temp3; - } else { - 
result_temp1 = np_sqrt(result_temp3); - ERR_CHECK(result_temp1) - Py_DECREF(result_temp3); - result_ndarray = result_temp1; - } - - result_dict = PyDict_New(); - MEM_CHECK(result_dict) - PyDict_SetItemString(result_dict, "array", result_ndarray); - PyDict_SetItemString(result_dict, "mask", result_mask); - - Py_DECREF(result_ndarray); - Py_DECREF(result_mask); - return result_dict; -} - - -static char DateArray_asfreq_doc[] = ""; -static PyObject * -DateArray_asfreq(PyObject *self, PyObject *args) -{ - PyArrayObject *fromDates, *toDates; - PyArrayIterObject *iterFrom, *iterTo; - PyObject *fromDateObj, *toDateObj; - char *relation; - int fromFreq, toFreq; - long fromDate, toDate; - long (*asfreq_main)(long, char, struct asfreq_info*) = NULL; - struct asfreq_info af_info; - - if (!PyArg_ParseTuple(args, - "Oiis:asfreq(fromDates, fromfreq, tofreq, relation)", - &fromDates, &fromFreq, &toFreq, &relation)) return NULL; - - get_asfreq_info(fromFreq, toFreq, &af_info); - - asfreq_main = get_asfreq_func(fromFreq, toFreq, 0); - - toDates = (PyArrayObject *)PyArray_Copy(fromDates); - - iterFrom = (PyArrayIterObject *)PyArray_IterNew((PyObject *)fromDates); - if (iterFrom == NULL) return NULL; - - iterTo = (PyArrayIterObject *)PyArray_IterNew((PyObject *)toDates); - if (iterTo == NULL) return NULL; - - while (iterFrom->index < iterFrom->size) { - - fromDateObj = PyArray_GETITEM(fromDates, iterFrom->dataptr); - fromDate = PyInt_AsLong(fromDateObj); - CHECK_ASFREQ(toDate = asfreq_main(fromDate, relation[0], &af_info)); - toDateObj = PyInt_FromLong(toDate); - - PyArray_SETITEM(toDates, iterTo->dataptr, toDateObj); - - Py_DECREF(fromDateObj); - Py_DECREF(toDateObj); - - PyArray_ITER_NEXT(iterFrom); - PyArray_ITER_NEXT(iterTo); - } - - Py_DECREF(iterFrom); - Py_DECREF(iterTo); - - return (PyObject *)toDates; - -} - -static char DateArray_getDateInfo_doc[] = ""; -static PyObject * -DateArray_getDateInfo(PyObject *self, PyObject *args) -{ - int freq; - char *info; - - PyArrayObject *array; - PyArrayObject *newArray; - PyArrayIterObject *iterSource, *iterResult; - - PyObject* (*getDateInfo)(DateObject*, void*) = NULL; - - if (!PyArg_ParseTuple(args, "Ois:getDateInfo(array, freq, info)", &array, &freq, &info)) return NULL; - newArray = (PyArrayObject *)PyArray_Copy(array); - - iterSource = (PyArrayIterObject *)PyArray_IterNew((PyObject *)array); - iterResult = (PyArrayIterObject *)PyArray_IterNew((PyObject *)newArray); - - - switch(*info) - { - case 'Y': //year - getDateInfo = &DateObject_year; - break; - case 'F': //"fiscal" year - getDateInfo = &DateObject_qyear; - break; - case 'Q': //quarter - getDateInfo = &DateObject_quarter; - break; - case 'M': //month - getDateInfo = &DateObject_month; - break; - case 'D': //day - getDateInfo = &DateObject_day; - break; - case 'R': //day of year - getDateInfo = &DateObject_day_of_year; - break; - case 'W': //day of week - getDateInfo = &DateObject_day_of_week; - break; - case 'I': //week of year - getDateInfo = &DateObject_week; - break; - case 'H': //hour - getDateInfo = &DateObject_hour; - break; - case 'T': //minute - getDateInfo = &DateObject_minute; - break; - case 'S': //second - getDateInfo = &DateObject_second; - break; - default: - return NULL; - } - - while (iterSource->index < iterSource->size) { - DateObject *curr_date; - PyObject *val, *dInfo; - - val = PyArray_GETITEM(array, iterSource->dataptr); - curr_date = DateObject_FromFreqAndValue(freq, PyInt_AsLong(val)); - dInfo = getDateInfo(curr_date, NULL); - - PyArray_SETITEM(newArray, iterResult->dataptr, 
dInfo); - - Py_DECREF(val); - Py_DECREF(curr_date); - Py_DECREF(dInfo); - - PyArray_ITER_NEXT(iterSource); - PyArray_ITER_NEXT(iterResult); - } - - Py_DECREF(iterSource); - Py_DECREF(iterResult); - - return (PyObject *) newArray; -} - static PyMethodDef cseries_methods[] = { {"MA_mov_sum", (PyCFunction)MaskedArray_mov_sum, - METH_VARARGS | METH_KEYWORDS, MaskedArray_mov_sum_doc}, + METH_VARARGS | METH_KEYWORDS, ""}, {"MA_mov_median", (PyCFunction)MaskedArray_mov_median, - METH_VARARGS | METH_KEYWORDS, MaskedArray_mov_median_doc}, + METH_VARARGS | METH_KEYWORDS, ""}, {"MA_mov_average", (PyCFunction)MaskedArray_mov_average, - METH_VARARGS | METH_KEYWORDS, MaskedArray_mov_average_doc}, + METH_VARARGS | METH_KEYWORDS, ""}, {"MA_mov_stddev", (PyCFunction)MaskedArray_mov_stddev, - METH_VARARGS | METH_KEYWORDS, MaskedArray_mov_stddev_doc}, + METH_VARARGS | METH_KEYWORDS, ""}, {"TS_convert", (PyCFunction)TimeSeries_convert, - METH_VARARGS, TimeSeries_convert_doc}, + METH_VARARGS, ""}, {"DA_asfreq", (PyCFunction)DateArray_asfreq, - METH_VARARGS, DateArray_asfreq_doc}, + METH_VARARGS, ""}, {"DA_getDateInfo", (PyCFunction)DateArray_getDateInfo, - METH_VARARGS, DateArray_getDateInfo_doc}, + METH_VARARGS, ""}, - {"thisday", (PyCFunction)cseries_thisday, - METH_VARARGS, cseries_thisday_doc}, - {"check_freq", (PyCFunction)cseries_check_freq, - METH_VARARGS, cseries_check_freq_doc}, - {"check_freq_str", (PyCFunction)cseries_check_freq_str, - METH_VARARGS, cseries_check_freq_str_doc}, - {"get_freq_group", (PyCFunction)cseries_get_freq_group, - METH_VARARGS, cseries_get_freq_group_doc}, + {"thisday", (PyCFunction)c_tdates_thisday, + METH_VARARGS, c_tdates_thisday_doc}, + {"check_freq", (PyCFunction)c_tdates_check_freq, + METH_VARARGS, c_tdates_check_freq_doc}, + {"check_freq_str", (PyCFunction)c_tdates_check_freq_str, + METH_VARARGS, c_tdates_check_freq_str_doc}, + {"get_freq_group", (PyCFunction)c_tdates_get_freq_group, + METH_VARARGS, c_tdates_get_freq_group_doc}, {"set_callback_DateFromString", (PyCFunction)set_callback_DateFromString, - METH_VARARGS, set_callback_DateFromString_doc}, + METH_VARARGS, ""}, {"set_callback_DateTimeFromString", (PyCFunction)set_callback_DateTimeFromString, - METH_VARARGS, set_callback_DateTimeFromString_doc}, + METH_VARARGS, ""}, {NULL, NULL} }; @@ -3688,52 +42,13 @@ PyMODINIT_FUNC initcseries(void) { - PyObject *m, *ops_dict; + PyObject *m; - if (PyType_Ready(&DateType) < 0) return; - - DateCalc_Error = - PyErr_NewException("cseries.DateCalc_Error", NULL, NULL); - DateCalc_RangeError = - PyErr_NewException("cseries.DateCalc_RangeError", NULL, NULL); - - m = Py_InitModule3("cseries", cseries_methods, cseries_doc); + m = Py_InitModule("cseries", cseries_methods); if (m == NULL) return; - import_array(); - PyDateTime_IMPORT; - ops_dict = PyArray_GetNumericOps(); - NP_ADD = PyDict_GetItemString(ops_dict, "add"); - NP_MULTIPLY = PyDict_GetItemString(ops_dict, "multiply"); - NP_SUBTRACT = PyDict_GetItemString(ops_dict, "subtract"); - NP_SQRT = PyDict_GetItemString(ops_dict, "sqrt"); - NP_GREATER = PyDict_GetItemString(ops_dict, "greater"); - NP_GREATER_EQUAL = PyDict_GetItemString(ops_dict, "greater_equal"); - - Py_INCREF(NP_ADD); - Py_INCREF(NP_MULTIPLY); - Py_INCREF(NP_SUBTRACT); - Py_INCREF(NP_SQRT); - Py_INCREF(NP_GREATER); - Py_INCREF(NP_GREATER_EQUAL); - Py_DECREF(ops_dict); - - Py_INCREF(&DateType); - PyModule_AddObject(m, "Date", (PyObject *)(&DateType)); - - if(build_freq_dict() == INT_ERR_CODE) { - PyErr_SetString( \ - PyExc_ImportError, \ - "initialization of module 
timeseries.cseries failed"); - return; - }; - - PyModule_AddObject(m, "freq_dict", freq_dict); - PyModule_AddObject(m, "freq_dict_rev", freq_dict_rev); - PyModule_AddObject(m, "freq_constants", freq_constants); - - PyModule_AddObject(m, "DateCalc_Error", DateCalc_Error); - PyModule_AddObject(m, "DateCalc_RangeError", DateCalc_RangeError); - + import_c_lib(m); + import_c_tdates(m); + import_c_tseries(m); } From scipy-svn at scipy.org Wed May 9 13:10:31 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 12:10:31 -0500 (CDT) Subject: [Scipy-svn] r2977 - trunk/Lib/sandbox/timeseries Message-ID: <20070509171031.4E99C39C085@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 12:10:27 -0500 (Wed, 09 May 2007) New Revision: 2977 Modified: trunk/Lib/sandbox/timeseries/setup.py Log: updated to reflect changes to C code Modified: trunk/Lib/sandbox/timeseries/setup.py =================================================================== --- trunk/Lib/sandbox/timeseries/setup.py 2007-05-09 17:10:00 UTC (rev 2976) +++ trunk/Lib/sandbox/timeseries/setup.py 2007-05-09 17:10:27 UTC (rev 2977) @@ -10,11 +10,13 @@ from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs nxheader = join(get_numpy_include_dirs()[0],'numpy',) confgr = Configuration('timeseries',parent_package,top_path) - sources = join('src', 'cseries.c') + sources = [join('src', x) for x in ('c_lib.c', + 'c_tdates.c', + 'c_tseries.c', + 'cseries.c')] confgr.add_extension('cseries', - sources=[sources,], - include_dirs=[nxheader], - ) + sources=sources, + include_dirs=[nxheader, 'include']) confgr.add_subpackage('lib') confgr.add_subpackage('io') From scipy-svn at scipy.org Wed May 9 14:38:56 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 13:38:56 -0500 (CDT) Subject: [Scipy-svn] r2978 - trunk/Lib/sandbox/timeseries Message-ID: <20070509183856.D079B39C209@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 13:38:52 -0500 (Wed, 09 May 2007) New Revision: 2978 Removed: trunk/Lib/sandbox/timeseries/.project trunk/Lib/sandbox/timeseries/.pydevproject Log: Removed file/folder Deleted: trunk/Lib/sandbox/timeseries/.project =================================================================== --- trunk/Lib/sandbox/timeseries/.project 2007-05-09 17:10:27 UTC (rev 2977) +++ trunk/Lib/sandbox/timeseries/.project 2007-05-09 18:38:52 UTC (rev 2978) @@ -1,17 +0,0 @@ - - - scipy_svn_timeseries - - - - - - org.python.pydev.PyDevBuilder - - - - - - org.python.pydev.pythonNature - - Deleted: trunk/Lib/sandbox/timeseries/.pydevproject =================================================================== --- trunk/Lib/sandbox/timeseries/.pydevproject 2007-05-09 17:10:27 UTC (rev 2977) +++ trunk/Lib/sandbox/timeseries/.pydevproject 2007-05-09 18:38:52 UTC (rev 2978) @@ -1,13 +0,0 @@ - - - - -python 2.4 - -/scipy_svn_timeseries -/scipy_svn_timeseries/mtimeseries - - -/home/backtopop/workspace/numpyalt/src - - From scipy-svn at scipy.org Wed May 9 15:37:28 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 9 May 2007 14:37:28 -0500 (CDT) Subject: [Scipy-svn] r2979 - in trunk/Lib/sandbox/timeseries: include src Message-ID: <20070509193728.727DAC7C00F@new.scipy.org> Author: mattknox_ca Date: 2007-05-09 14:37:20 -0500 (Wed, 09 May 2007) New Revision: 2979 Modified: trunk/Lib/sandbox/timeseries/include/c_tdates.h trunk/Lib/sandbox/timeseries/src/c_tdates.c trunk/Lib/sandbox/timeseries/src/c_tseries.c trunk/Lib/sandbox/timeseries/src/cseries.c Log: fixed some compiler warnings 
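(A short illustration of what the diff below does, since the log is terse: c_tdates.h currently declares unsized arrays such as "char c_tdates_thisday_doc[];", and every translation unit that includes the header picks those up as tentative definitions, which appears to be what the compiler was warning about. The patch keeps only function prototypes in the header, writes the docstring literals directly into the PyMethodDef table in cseries.c, and widens some loop counters to Py_ssize_t. The sketch that follows is hypothetical: example_func, example_methods and initexample are invented names, not part of the timeseries sources; it only shows the same pattern against the Python 2.x C API.)

#include <Python.h>

/* Header side: keep only the prototype.  An unsized "char example_doc[];"
   here would be a tentative definition repeated in every .c file that
   includes the header, which is the kind of thing the warnings point at. */
PyObject *example_func(PyObject *self, PyObject *args);

PyObject *
example_func(PyObject *self, PyObject *args)
{
    /* Py_ssize_t, not int, is the length type the Python 2.5 C API returns
       from functions such as PyTuple_GET_SIZE. */
    Py_ssize_t n = PyTuple_GET_SIZE(args);
    return PyInt_FromSsize_t(n);
}

/* The docstring literal lives next to the method table instead of in a
   shared header array. */
static PyMethodDef example_methods[] = {
    {"example", (PyCFunction)example_func, METH_VARARGS,
     "Return the number of positional arguments received."},
    {NULL, NULL}
};

PyMODINIT_FUNC
initexample(void)
{
    Py_InitModule("example", example_methods);
}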
Modified: trunk/Lib/sandbox/timeseries/include/c_tdates.h =================================================================== --- trunk/Lib/sandbox/timeseries/include/c_tdates.h 2007-05-09 18:38:52 UTC (rev 2978) +++ trunk/Lib/sandbox/timeseries/include/c_tdates.h 2007-05-09 19:37:20 UTC (rev 2979) @@ -109,16 +109,10 @@ PyObject *DateArray_asfreq(PyObject *, PyObject *); PyObject *DateArray_getDateInfo(PyObject *, PyObject *); -char c_tdates_thisday_doc[]; -PyObject *c_tdates_thisday(PyObject *, PyObject *); -char c_tdates_check_freq_doc[]; +PyObject *c_tdates_thisday(PyObject *, PyObject *); PyObject *c_tdates_check_freq(PyObject *, PyObject *); - -char c_tdates_check_freq_str_doc[]; PyObject *c_tdates_check_freq_str(PyObject *, PyObject *); - -char c_tdates_get_freq_group_doc[]; PyObject *c_tdates_get_freq_group(PyObject *, PyObject *); PyObject *set_callback_DateFromString(PyObject *, PyObject *); Modified: trunk/Lib/sandbox/timeseries/src/c_tdates.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-09 18:38:52 UTC (rev 2978) +++ trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-09 19:37:20 UTC (rev 2979) @@ -2279,8 +2279,6 @@ /////////////////////////////////////////////////////////////////////// -char c_tdates_check_freq_doc[] = -"translate user specified frequency into frequency constant"; PyObject * c_tdates_check_freq(PyObject *self, PyObject *args) { @@ -2293,8 +2291,6 @@ return PyInt_FromLong(freq_val); } -char c_tdates_check_freq_str_doc[] = -"translate user specified frequency into standard string representation"; PyObject * c_tdates_check_freq_str(PyObject *self, PyObject *args) { @@ -2312,8 +2308,6 @@ return result; } -char c_tdates_get_freq_group_doc[] = -"translate user specified frequency into frequency group constant"; PyObject * c_tdates_get_freq_group(PyObject *self, PyObject *args) { @@ -2326,12 +2320,6 @@ return PyInt_FromLong(get_freq_group(freq_val)); } -char c_tdates_thisday_doc[] = -"Returns today's date, at the given frequency\n\n" -":Parameters:\n" -" - freq : string/int\n" -" Frequency to convert the Date to. 
Accepts any valid frequency\n" -" specification (string or integer)\n"; PyObject * c_tdates_thisday(PyObject *self, PyObject *args) { Modified: trunk/Lib/sandbox/timeseries/src/c_tseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-09 18:38:52 UTC (rev 2978) +++ trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-09 19:37:20 UTC (rev 2979) @@ -118,7 +118,6 @@ long newStart, newStartTemp; long newEnd, newEndTemp; long newLen, newHeight; - int i; long currIndex, prevIndex; long nd; npy_intp *dim, *newIdx; @@ -128,6 +127,7 @@ int fromFreq, toFreq; char relation; asfreq_info af_info; + Py_ssize_t i; PyObject *val, *valMask; @@ -359,7 +359,8 @@ { PyArrayObject *orig_mask_tmp; - int i, valid_points=0, is_masked; + int valid_points=0, is_masked; + Py_ssize_t i; orig_mask_tmp = (PyArrayObject*)orig_mask; @@ -401,7 +402,7 @@ calc_mov_sum(PyArrayObject *orig_ndarray, int span, int rtype) { PyArrayObject *result_ndarray=NULL; - int i; + Py_ssize_t i; result_ndarray = (PyArrayObject*)PyArray_ZEROS( orig_ndarray->nd, @@ -418,7 +419,7 @@ if (i == 0) { mov_sum_val = val; } else { - int prev_idx = i-1; + Py_ssize_t prev_idx = i-1; PyObject *mov_sum_prevval; mov_sum_prevval= PyArray_GETITEM(result_ndarray, PyArray_GetPtr(result_ndarray, &prev_idx)); @@ -428,7 +429,7 @@ if (i >= span) { PyObject *temp_val, *rem_val; - int rem_idx = i-span; + Py_ssize_t rem_idx = i-span; temp_val = mov_sum_val; rem_val = PyArray_GETITEM(orig_ndarray, PyArray_GetPtr(orig_ndarray, &rem_idx)); @@ -552,8 +553,9 @@ PyObject **result_array, **ref_array, **even_array=NULL; PyObject *new_val, *old_val; PyObject *temp_add, *one_half; - int a, i, k, R, arr_size, z; + int a, k, R, arr_size, z; int *r; + Py_ssize_t i; arr_size = orig_ndarray->dimensions[0]; @@ -830,6 +832,7 @@ Py_DECREF(result_ndarray); Py_DECREF(result_mask); return result_dict; + } void import_c_tseries(PyObject *m) { import_array(); } Modified: trunk/Lib/sandbox/timeseries/src/cseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-09 18:38:52 UTC (rev 2978) +++ trunk/Lib/sandbox/timeseries/src/cseries.c 2007-05-09 19:37:20 UTC (rev 2979) @@ -2,7 +2,6 @@ #include "c_tdates.h" #include "c_tseries.h" - static PyMethodDef cseries_methods[] = { {"MA_mov_sum", (PyCFunction)MaskedArray_mov_sum, @@ -22,15 +21,28 @@ {"DA_getDateInfo", (PyCFunction)DateArray_getDateInfo, METH_VARARGS, ""}, + {"thisday", (PyCFunction)c_tdates_thisday, - METH_VARARGS, c_tdates_thisday_doc}, + METH_VARARGS, + "Returns today's date, at the given frequency\n\n" + ":Parameters:\n" + " - freq : string/int\n" + " Frequency to convert the Date to. 
Accepts any valid frequency\n" + " specification (string or integer)\n"}, + {"check_freq", (PyCFunction)c_tdates_check_freq, - METH_VARARGS, c_tdates_check_freq_doc}, + METH_VARARGS, + "translate user specified frequency into frequency constant"}, + {"check_freq_str", (PyCFunction)c_tdates_check_freq_str, - METH_VARARGS, c_tdates_check_freq_str_doc}, + METH_VARARGS, + "translate user specified frequency into standard string representation"}, + {"get_freq_group", (PyCFunction)c_tdates_get_freq_group, - METH_VARARGS, c_tdates_get_freq_group_doc}, + METH_VARARGS, + "translate user specified frequency into frequency group constant"}, + {"set_callback_DateFromString", (PyCFunction)set_callback_DateFromString, METH_VARARGS, ""}, {"set_callback_DateTimeFromString", (PyCFunction)set_callback_DateTimeFromString, @@ -51,4 +63,5 @@ import_c_lib(m); import_c_tdates(m); import_c_tseries(m); + } From scipy-svn at scipy.org Thu May 10 09:34:31 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 10 May 2007 08:34:31 -0500 (CDT) Subject: [Scipy-svn] r2980 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070510133431.1C73639C191@new.scipy.org> Author: mattknox_ca Date: 2007-05-10 08:34:25 -0500 (Thu, 10 May 2007) New Revision: 2980 Modified: trunk/Lib/sandbox/timeseries/src/c_tseries.c Log: applied some fixes for 64-bit problems Modified: trunk/Lib/sandbox/timeseries/src/c_tseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-09 19:37:20 UTC (rev 2979) +++ trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-10 13:34:25 UTC (rev 2980) @@ -127,7 +127,7 @@ int fromFreq, toFreq; char relation; asfreq_info af_info; - Py_ssize_t i; + int i; PyObject *val, *valMask; @@ -241,12 +241,14 @@ //set values in the new array for (i = 0; i < array->dimensions[0]; i++) { - val = PyArray_GETITEM(array, PyArray_GetPtr(array, &i)); - valMask = PyArray_GETITEM(mask, PyArray_GetPtr(mask, &i)); + npy_intp idx = (npy_intp)i; + val = PyArray_GETITEM(array, PyArray_GetPtr(array, &idx)); + valMask = PyArray_GETITEM(mask, PyArray_GetPtr(mask, &idx)); + CHECK_ASFREQ(currIndex = asfreq_main(startIndex + i, relation, &af_info)); - newIdx[0] = currIndex-newStart; + newIdx[0] = (npy_intp)(currIndex-newStart); if (newHeight > 1) { @@ -257,7 +259,7 @@ prevIndex = currIndex; } - newIdx[1] = currPerLen; + newIdx[1] = (npy_intp)currPerLen; currPerLen++; } @@ -359,19 +361,19 @@ { PyArrayObject *orig_mask_tmp; - int valid_points=0, is_masked; - Py_ssize_t i; + int i, valid_points=0, is_masked; orig_mask_tmp = (PyArrayObject*)orig_mask; for (i=0; i<((*orig_ndarray_tmp)->dimensions[0]); i++) { + npy_intp idx = (npy_intp)i; is_masked=0; if (orig_mask != NULL) { PyObject *valMask; valMask = PyArray_GETITEM(orig_mask_tmp, - PyArray_GetPtr(orig_mask_tmp, &i)); + PyArray_GetPtr(orig_mask_tmp, &idx)); is_masked = (int)PyInt_AsLong(valMask); Py_DECREF(valMask); } @@ -402,7 +404,7 @@ calc_mov_sum(PyArrayObject *orig_ndarray, int span, int rtype) { PyArrayObject *result_ndarray=NULL; - Py_ssize_t i; + int i; result_ndarray = (PyArrayObject*)PyArray_ZEROS( orig_ndarray->nd, @@ -413,26 +415,27 @@ for (i=0; idimensions[0]; i++) { PyObject *val=NULL, *mov_sum_val=NULL; + npy_intp idx = (npy_intp)i; - val = PyArray_GETITEM(orig_ndarray, PyArray_GetPtr(orig_ndarray, &i)); + val = PyArray_GETITEM(orig_ndarray, PyArray_GetPtr(orig_ndarray, &idx)); if (i == 0) { mov_sum_val = val; } else { - Py_ssize_t prev_idx = i-1; + idx = (npy_intp)(i-1); PyObject 
*mov_sum_prevval; mov_sum_prevval= PyArray_GETITEM(result_ndarray, - PyArray_GetPtr(result_ndarray, &prev_idx)); + PyArray_GetPtr(result_ndarray, &idx)); mov_sum_val = np_add(val, mov_sum_prevval); Py_DECREF(mov_sum_prevval); ERR_CHECK(mov_sum_val) if (i >= span) { PyObject *temp_val, *rem_val; - Py_ssize_t rem_idx = i-span; + idx = (npy_intp)(i-span); temp_val = mov_sum_val; rem_val = PyArray_GETITEM(orig_ndarray, - PyArray_GetPtr(orig_ndarray, &rem_idx)); + PyArray_GetPtr(orig_ndarray, &idx)); mov_sum_val = np_subtract(temp_val, rem_val); ERR_CHECK(mov_sum_val) @@ -442,8 +445,10 @@ } } + idx = (npy_intp)i; + PyArray_SETITEM(result_ndarray, - PyArray_GetPtr(result_ndarray, &i), + PyArray_GetPtr(result_ndarray, &idx), mov_sum_val); if (mov_sum_val != val) { Py_DECREF(val); } @@ -553,11 +558,11 @@ PyObject **result_array, **ref_array, **even_array=NULL; PyObject *new_val, *old_val; PyObject *temp_add, *one_half; - int a, k, R, arr_size, z; + int a, i, k, R, arr_size, z; int *r; - Py_ssize_t i; + npy_intp idx; - arr_size = orig_ndarray->dimensions[0]; + arr_size = (int)(orig_ndarray->dimensions[0]); result_ndarray = (PyArrayObject*)PyArray_ZEROS( orig_ndarray->nd, @@ -576,7 +581,8 @@ MEM_CHECK(ref_array) for (i=0; i Author: stefan Date: 2007-05-10 17:49:10 -0500 (Thu, 10 May 2007) New Revision: 2981 Modified: trunk/Lib/ndimage/src/nd_image.h Log: ndimage: Remove variable name from function signature in header. Modified: trunk/Lib/ndimage/src/nd_image.h =================================================================== --- trunk/Lib/ndimage/src/nd_image.h 2007-05-10 13:34:25 UTC (rev 2980) +++ trunk/Lib/ndimage/src/nd_image.h 2007-05-10 22:49:10 UTC (rev 2981) @@ -287,7 +287,7 @@ #define NA_OutputArray (*(PyArrayObject* (*) (PyObject*,NumarrayType,int) ) (void *) NA_OutputArray) #define NA_IoArray (*(PyArrayObject* (*) (PyObject*,NumarrayType,int) ) (void *) NA_IoArray) -#define NA_NewArray (*(PyArrayObject* (*) (void* buffer, NumarrayType type, int ndim, ...) ) (void *) NA_NewArray ) +#define NA_NewArray (*(PyArrayObject* (*) (void* buffer, NumarrayType, int, ...) ) (void *) NA_NewArray ) #define NA_elements (*(unsigned long (*) (PyArrayObject*) ) (void *) NA_elements) #define NA_InputArray (*(PyArrayObject* (*) (PyObject*,NumarrayType,int) ) (void *) NA_InputArray) From scipy-svn at scipy.org Thu May 10 19:26:04 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 10 May 2007 18:26:04 -0500 (CDT) Subject: [Scipy-svn] r2982 - trunk/Lib/ndimage/src Message-ID: <20070510232604.BE2BC39C22A@new.scipy.org> Author: stefan Date: 2007-05-10 18:25:48 -0500 (Thu, 10 May 2007) New Revision: 2982 Modified: trunk/Lib/ndimage/src/nd_image.c trunk/Lib/ndimage/src/nd_image.h Log: ndimage: remove usage of NA_elements and NA_NBYTES. 
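(For context on the ndimage change that follows: NA_elements and NA_NBYTES were local numarray-compatibility helpers inside nd_image.h that recomputed an array's element count and byte size by hand, and numpy's own PyArray_SIZE and PyArray_NBYTES macros already give exactly those quantities, so the local copies can be dropped. The fragment below is illustration only; describe_array is an invented helper, not part of ndimage, and just shows the two macros the patch switches to.)

#include <Python.h>
#include <numpy/arrayobject.h>
#include <stdio.h>

/* PyArray_SIZE(a)   : product of a's dimensions (number of elements)
   PyArray_NBYTES(a) : number of elements multiplied by the item size */
static void
describe_array(PyArrayObject *a)
{
    npy_intp n_elements = PyArray_SIZE(a);
    npy_intp n_bytes = PyArray_NBYTES(a);
    printf("%ld elements, %ld bytes\n", (long)n_elements, (long)n_bytes);
}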
Modified: trunk/Lib/ndimage/src/nd_image.c =================================================================== --- trunk/Lib/ndimage/src/nd_image.c 2007-05-10 22:49:10 UTC (rev 2981) +++ trunk/Lib/ndimage/src/nd_image.c 2007-05-10 23:25:48 UTC (rev 2982) @@ -101,7 +101,7 @@ { long *pa, ii; PyArrayObject *array = NA_InputArray(object, PyArray_LONG, NPY_CARRAY); - maybelong length = NA_elements(array); + maybelong length = PyArray_SIZE(array); *sequence = (maybelong*)malloc(length * sizeof(maybelong)); if (!*sequence) { Modified: trunk/Lib/ndimage/src/nd_image.h =================================================================== --- trunk/Lib/ndimage/src/nd_image.h 2007-05-10 22:49:10 UTC (rev 2981) +++ trunk/Lib/ndimage/src/nd_image.h 2007-05-10 23:25:48 UTC (rev 2982) @@ -88,16 +88,6 @@ PyArray_CheckFromAny(a, descr, 0, 0, requires, NULL); } -static unsigned long -NA_elements(PyArrayObject *a) -{ - int i; - unsigned long n = 1; - for(i = 0; ind; i++) - n *= a->dimensions[i]; - return n; -} - /* satisfies ensures that 'a' meets a set of requirements and matches the specified type. */ @@ -247,8 +237,6 @@ return self; } -#define NA_NBYTES(a) (a->descr->elsize * NA_elements(a)) - static PyArrayObject * NA_NewAll(int ndim, maybelong *shape, NumarrayType type, void *buffer, maybelong byteoffset, maybelong bytestride, @@ -265,9 +253,9 @@ result = NULL; } else { if (buffer) { - memcpy(result->data, buffer, NA_NBYTES(result)); + memcpy(result->data, buffer, PyArray_NBYTES(result)); } else { - memset(result->data, 0, NA_NBYTES(result)); + memset(result->data, 0, PyArray_NBYTES(result)); } } } From scipy-svn at scipy.org Fri May 11 09:38:04 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 11 May 2007 08:38:04 -0500 (CDT) Subject: [Scipy-svn] r2983 - trunk/Lib/sandbox/timeseries Message-ID: <20070511133804.0728239C030@new.scipy.org> Author: mattknox_ca Date: 2007-05-11 08:38:00 -0500 (Fri, 11 May 2007) New Revision: 2983 Removed: trunk/Lib/sandbox/timeseries/archived_version/ Log: deleted archive_version folder. We can always retrieve this stuff from subversion later if we think we need something from it. From scipy-svn at scipy.org Fri May 11 16:25:03 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 11 May 2007 15:25:03 -0500 (CDT) Subject: [Scipy-svn] r2984 - in trunk/Lib/sandbox/timeseries: lib src Message-ID: <20070511202503.F0E3D39C060@new.scipy.org> Author: mattknox_ca Date: 2007-05-11 15:24:55 -0500 (Fri, 11 May 2007) New Revision: 2984 Modified: trunk/Lib/sandbox/timeseries/lib/moving_funcs.py trunk/Lib/sandbox/timeseries/src/c_tseries.c Log: fixed a couple problems with the data type for moving function results Modified: trunk/Lib/sandbox/timeseries/lib/moving_funcs.py =================================================================== --- trunk/Lib/sandbox/timeseries/lib/moving_funcs.py 2007-05-11 13:38:00 UTC (rev 2983) +++ trunk/Lib/sandbox/timeseries/lib/moving_funcs.py 2007-05-11 20:24:55 UTC (rev 2984) @@ -80,10 +80,9 @@ $$dtype$$""" kwargs = {'span':span} - if dtype is None: dtype = data.dtype - kwargs['dtype'] = dtype - - + if dtype is not None: + kwargs['dtype'] = dtype + return _moving_func(data, MA_mov_sum, kwargs) #............................................................................... 
def mov_median(data, span, dtype=None): @@ -95,8 +94,8 @@ $$dtype$$""" kwargs = {'span':span} - if dtype is None: dtype = data.dtype - kwargs['dtype'] = dtype + if dtype is not None: + kwargs['dtype'] = dtype return _moving_func(data, MA_mov_median, kwargs) #............................................................................... Modified: trunk/Lib/sandbox/timeseries/src/c_tseries.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-11 13:38:00 UTC (rev 2983) +++ trunk/Lib/sandbox/timeseries/src/c_tseries.c 2007-05-11 20:24:55 UTC (rev 2984) @@ -289,7 +289,7 @@ } -/* This function is directly copied from direct copy of function in */ +/* This function is directly copied from the numpy source */ /* Return typenumber from dtype2 unless it is NULL, then return NPY_DOUBLE if dtype1->type_num is integer or bool and dtype1->type_num otherwise. @@ -297,20 +297,30 @@ static int _get_type_num_double(PyArray_Descr *dtype1, PyArray_Descr *dtype2) { - if (dtype2 != NULL) - return dtype2->type_num; + if (dtype2 != NULL) { + return dtype2->type_num; + } - /* For integer or bool data-types */ - if (dtype1->type_num < NPY_FLOAT) { - return NPY_DOUBLE; - } - else { - return dtype1->type_num; - } + /* For integer or bool data-types */ + if (dtype1->type_num < NPY_FLOAT) { + return NPY_DOUBLE; + } + else { + return dtype1->type_num; + } } -#define _CHKTYPENUM(typ) ((typ) ? (typ)->type_num : PyArray_NOTYPE) +static int +_get_type_num(PyArray_Descr *dtype1, PyArray_Descr *dtype2) +{ + if (dtype2 != NULL) { + return dtype2->type_num; + } else { + return dtype1->type_num; + } +} + /* validates the standard arguments to moving functions and set the original mask, original ndarray, and mask for the result */ static PyObject * @@ -480,7 +490,7 @@ check_mov_args(orig_arrayobj, span, 1, &orig_ndarray, &result_mask); - rtype = _CHKTYPENUM(dtype); + rtype = _get_type_num(((PyArrayObject*)orig_ndarray)->descr, dtype); result_ndarray = calc_mov_sum((PyArrayObject*)orig_ndarray, span, rtype); @@ -735,7 +745,11 @@ check_mov_args(orig_arrayobj, span, 1, &orig_ndarray, &result_mask); - rtype = _CHKTYPENUM(dtype); + if ((span % 2) == 0) { + rtype = _get_type_num_double(((PyArrayObject*)orig_ndarray)->descr, dtype); + } else { + rtype = _get_type_num(((PyArrayObject*)orig_ndarray)->descr, dtype); + } result_ndarray = calc_mov_median((PyArrayObject*)orig_ndarray, span, rtype); From scipy-svn at scipy.org Fri May 11 18:30:03 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 11 May 2007 17:30:03 -0500 (CDT) Subject: [Scipy-svn] r2985 - in trunk/Lib/sandbox/maskedarray: . tests Message-ID: <20070511223003.F3D5939C1D4@new.scipy.org> Author: pierregm Date: 2007-05-11 17:29:57 -0500 (Fri, 11 May 2007) New Revision: 2985 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/extras.py trunk/Lib/sandbox/maskedarray/tests/test_extras.py trunk/Lib/sandbox/maskedarray/testutils.py Log: testutils : comparing a scalar/array to masked fails. 
core : fixed a couple of docstring test_extras : fixed the notmasked_contiguous test extras : added the hsplit function Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-11 20:24:55 UTC (rev 2984) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-11 22:29:57 UTC (rev 2985) @@ -1722,7 +1722,7 @@ `axis` : Integer *[None]* Axis to be indirectly sorted (default -1) `fill_value` : var *[None]* - Default filling value. If None, uses the data type default. + Default filling value. If None, uses the minimum default for the data type. """ if fill_value is None: fill_value = minimum_fill_value(self) @@ -1733,7 +1733,7 @@ """Returns the array of indices for the maximum values of `a` along the specified axis. Masked values are treated as if they had the value `fill_value`. - If `fill_value` is None, the default for the data type is used. + If `fill_value` is None, the maximum default for the data type is used. Returns a numpy array. :Keywords: @@ -2632,20 +2632,6 @@ ################################################################################ if __name__ == '__main__': - import numpy as N - from maskedarray.testutils import assert_equal, assert_array_equal - marray = masked_array - # - if 0: - x = masked_array([1,2]) - y = x * masked - print y - assert_equal(y.shape, x.shape) - assert_equal(y._mask, [True, True]) - y = x + masked - assert_equal(y.shape, x.shape) - assert_equal(y._mask, [True, True]) - # if 1: - x = arange(64).reshape(8,8) - z = maximum(x) + x = arange(10) + assert(x.ctypes.data == x.filled().ctypes.data) \ No newline at end of file Modified: trunk/Lib/sandbox/maskedarray/extras.py =================================================================== --- trunk/Lib/sandbox/maskedarray/extras.py 2007-05-11 20:24:55 UTC (rev 2984) +++ trunk/Lib/sandbox/maskedarray/extras.py 2007-05-11 22:29:57 UTC (rev 2985) @@ -15,7 +15,7 @@ 'apply_along_axis', 'atleast_1d', 'atleast_2d', 'atleast_3d', 'average', 'vstack', 'hstack', 'dstack', 'row_stack', 'column_stack', 'compress_rowcols', 'compress_rows', 'compress_cols', 'count_masked', -'dot', +'dot', 'hsplit', 'mask_rowcols','mask_rows','mask_cols','masked_all','masked_all_like', 'mediff1d', 'mr_', 'notmasked_edges','notmasked_contiguous', @@ -155,6 +155,8 @@ column_stack = _fromnxfunction('column_stack') dstack = _fromnxfunction('dstack') +hsplit = _fromnxfunction('hsplit') + #####-------------------------------------------------------------------------- #---- #####-------------------------------------------------------------------------- Modified: trunk/Lib/sandbox/maskedarray/tests/test_extras.py =================================================================== --- trunk/Lib/sandbox/maskedarray/tests/test_extras.py 2007-05-11 20:24:55 UTC (rev 2984) +++ trunk/Lib/sandbox/maskedarray/tests/test_extras.py 2007-05-11 22:29:57 UTC (rev 2985) @@ -156,8 +156,9 @@ [1,1,1,1,1,1,1,1], [0,0,0,0,0,0,1,0],]) tmp = notmasked_contiguous(a, None) - assert_equal(tmp[-1], (6, (16,21))) - assert_equal(tmp[-2], (4, (0,3))) + assert_equal(tmp[-1], slice(23,23,None)) + assert_equal(tmp[-2], slice(16,21,None)) + assert_equal(tmp[-3], slice(0,3,None)) # tmp = notmasked_contiguous(a, 0) assert(len(tmp[-1]) == 1) @@ -166,9 +167,10 @@ assert(len(tmp[0]) == 2) # tmp = notmasked_contiguous(a, 1) - assert_equal(tmp[0][-1], (4, (0,3))) + assert_equal(tmp[0][-1], slice(0,3,None)) assert(tmp[1] is None) - assert_equal(tmp[2][-1], (6, (0,5))) + 
assert_equal(tmp[2][-1], slice(7,7,None)) + assert_equal(tmp[2][-2], slice(0,5,None)) class test_2dfunctions(NumpyTestCase): "Tests 2D functions" Modified: trunk/Lib/sandbox/maskedarray/testutils.py =================================================================== --- trunk/Lib/sandbox/maskedarray/testutils.py 2007-05-11 20:24:55 UTC (rev 2984) +++ trunk/Lib/sandbox/maskedarray/testutils.py 2007-05-11 22:29:57 UTC (rev 2985) @@ -18,7 +18,7 @@ from numpy.testing.utils import build_err_msg, rand import core -from core import mask_or, getmask, getmaskarray, masked_array, nomask +from core import mask_or, getmask, getmaskarray, masked_array, nomask, masked from core import filled, equal, less #------------------------------------------------------------------------------ @@ -67,6 +67,10 @@ assert desired == actual, msg return # Case #4. arrays or equivalent + if ((actual is masked) and not (desired is masked)) or \ + ((desired is masked) and not (actual is masked)): + msg = build_err_msg([actual, desired], err_msg, header='', names=('x', 'y')) + raise ValueError(msg) actual = N.array(actual, copy=False, subok=True) desired = N.array(desired, copy=False, subok=True) if actual.dtype.char in "OS" and desired.dtype.char in "OS": @@ -115,6 +119,12 @@ x = masked_array(xf, copy=False, mask=m).filled(fill_value) y = masked_array(yf, copy=False, mask=m).filled(fill_value) + + if ((x is masked) and not (y is masked)) or \ + ((y is masked) and not (x is masked)): + msg = build_err_msg([x, y], err_msg, header=header, names=('x', 'y')) + raise ValueError(msg) + if (x.dtype.char != "O") and (x.dtype.char != "S"): x = x.astype(float_) if isinstance(x, N.ndarray) and x.size > 1: @@ -195,4 +205,8 @@ assert(m2 is nomask) if m2 is nomask: assert(m1 is nomask) - assert_array_equal(m1, m2) \ No newline at end of file + assert_array_equal(m1, m2) + +if __name__ == '__main__': + a = 12 + assert_equal(a, masked) \ No newline at end of file From scipy-svn at scipy.org Fri May 11 22:04:58 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 11 May 2007 21:04:58 -0500 (CDT) Subject: [Scipy-svn] r2986 - in trunk/Lib/sandbox/maskedarray: . tests Message-ID: <20070512020458.131D039C015@new.scipy.org> Author: pierregm Date: 2007-05-11 21:04:48 -0500 (Fri, 11 May 2007) New Revision: 2986 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/tests/test_subclassing.py trunk/Lib/sandbox/maskedarray/testutils.py Log: MaskedArray core : fixed a bug w/ subok=False that prevented the mask to be inherited testutils : fixed assert_array_compare to force the compared elements to pure ndarray TimeSeries tseries : fixed the 'filled' method : allows 2D arrays to be passed to convert (thx to David Huard for the inspiration) : added David Huards function/method split tdates : guess_freq : returns FR_UND when only one element Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-11 22:29:57 UTC (rev 2985) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-12 02:04:48 UTC (rev 2986) @@ -996,7 +996,7 @@ if not isinstance(data, MaskedArray): _data = _data.view(cls) elif not subok: - _data = _data.view(cls) + _data = data.view(cls) else: _data = _data.view(type(data)) # Backwards compat ....... 
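The one-line change above (viewing data rather than the bare ndarray _data) is what lets the mask survive a subok=False construction. The sketch below is only illustrative: it assumes the sandbox package is importable as plain maskedarray, and it uses a bare MaskedArray subclass MSub as a hypothetical stand-in for the MSubArray class defined in test_subclassing.py, whose new assertions (shown just below) it mirrors.

    import numpy
    from maskedarray import MaskedArray, masked_array
    from maskedarray.testutils import assert_equal

    class MSub(MaskedArray):
        # minimal stand-in for MSubArray from test_subclassing.py
        pass

    m = [0, 1, 0, 0, 1]
    xsub = MSub(numpy.arange(5), mask=m)
    # subok=False downcasts the result to a plain MaskedArray, but after
    # r2986 it still inherits the mask of xsub instead of dropping it.
    mxsub = masked_array(xsub, subok=False)
    assert not isinstance(mxsub, MSub)
    assert isinstance(mxsub, MaskedArray)
    assert_equal(mxsub._mask, m)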
Modified: trunk/Lib/sandbox/maskedarray/tests/test_subclassing.py =================================================================== --- trunk/Lib/sandbox/maskedarray/tests/test_subclassing.py 2007-05-11 22:29:57 UTC (rev 2985) +++ trunk/Lib/sandbox/maskedarray/tests/test_subclassing.py 2007-05-12 02:04:48 UTC (rev 2986) @@ -134,10 +134,12 @@ mxsub = masked_array(xsub, subok=False) assert not isinstance(mxsub, MSubArray) assert isinstance(mxsub, MaskedArray) + assert_equal(mxsub._mask, m) # mxsub = masked_array(xsub, subok=True) assert isinstance(mxsub, MSubArray) assert_equal(mxsub.info, xsub.info) + assert_equal(mxsub._mask, xsub._mask) ################################################################################ Modified: trunk/Lib/sandbox/maskedarray/testutils.py =================================================================== --- trunk/Lib/sandbox/maskedarray/testutils.py 2007-05-11 22:29:57 UTC (rev 2985) +++ trunk/Lib/sandbox/maskedarray/testutils.py 2007-05-12 02:04:48 UTC (rev 2986) @@ -117,8 +117,8 @@ yf = filled(y) m = mask_or(getmask(x), getmask(y)) - x = masked_array(xf, copy=False, mask=m).filled(fill_value) - y = masked_array(yf, copy=False, mask=m).filled(fill_value) + x = masked_array(xf, copy=False, subok=False, mask=m).filled(fill_value) + y = masked_array(yf, copy=False, subok=False, mask=m).filled(fill_value) if ((x is masked) and not (y is masked)) or \ ((y is masked) and not (x is masked)): From scipy-svn at scipy.org Fri May 11 22:05:24 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 11 May 2007 21:05:24 -0500 (CDT) Subject: [Scipy-svn] r2987 - in trunk/Lib/sandbox/timeseries: . tests Message-ID: <20070512020524.AE14739C015@new.scipy.org> Author: pierregm Date: 2007-05-11 21:04:59 -0500 (Fri, 11 May 2007) New Revision: 2987 Modified: trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tests/test_dates.py trunk/Lib/sandbox/timeseries/tests/test_multitimeseries.py trunk/Lib/sandbox/timeseries/tests/test_timeseries.py trunk/Lib/sandbox/timeseries/tmulti.py trunk/Lib/sandbox/timeseries/tseries.py Log: MaskedArray core : fixed a bug w/ subok=False that prevented the mask to be inherited testutils : fixed assert_array_compare to force the compared elements to pure ndarray TimeSeries tseries : fixed the 'filled' method : allows 2D arrays to be passed to convert (thx to David Huard for the inspiration) : added David Huards function/method split tdates : guess_freq : returns FR_UND when only one element Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -458,7 +458,9 @@ Returns a frequency code (alpha character).""" ddif = numeric.asarray(numpy.diff(dates)) ddif.sort() - if ddif[0] == ddif[-1] == 1.: + if ddif.size == 0: + fcode = _c.FR_UND + elif ddif[0] == ddif[-1] == 1.: fcode = _c.FR_DAY elif (ddif[0] == 1.) and (ddif[-1] == 3.): fcode = _c.FR_BUS @@ -699,4 +701,4 @@ if 1: "Tests the automatic sorting of dates." 
D = date_array_fromlist(dlist=['2006-01','2005-01','2004-01'],freq='M') - assert_equal(D.view(ndarray), [24037, 24049, 24061]) \ No newline at end of file + assert_equal(D.view(ndarray), [24037, 24049, 24061]) Modified: trunk/Lib/sandbox/timeseries/tests/test_dates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_dates.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tests/test_dates.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -109,6 +109,10 @@ dobj = [DateFromString(d) for d in dlist] odates = date_array_fromlist(dobj) assert_equal(dates,odates) + # + D = date_array_fromlist(dlist=['2006-01']) + assert_equal(D.tovalue(), [732312, ]) + assert_equal(D.freq, C.FR_UND) print "finished test_fromsobjects" def test_consistent_value(self): Modified: trunk/Lib/sandbox/timeseries/tests/test_multitimeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_multitimeseries.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tests/test_multitimeseries.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -72,6 +72,7 @@ assert(mts['2007-01']._data == mrec[0]) assert_equal(mts['2007-01']._dates, dates[0]) # + assert(isinstance(mts.f0, TimeSeries)) assert_equal(mts.f0, time_series(d, dates=dates, mask=m)) assert_equal(mts.f1, time_series(d[::-1], dates=dates, mask=m[::-1])) assert((mts._fieldmask == N.core.records.fromarrays([m, m[::-1]])).all()) Modified: trunk/Lib/sandbox/timeseries/tests/test_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -28,7 +28,8 @@ from timeseries import tseries from timeseries import Date, date_array_fromlist, date_array, thisday from timeseries import time_series, TimeSeries, adjust_endpoints, \ - mask_period, align_series, fill_missing_dates, tsmasked, concatenate_series + mask_period, align_series, fill_missing_dates, tsmasked, concatenate_series,\ + stack, split class test_creation(NumpyTestCase): "Base test class for MaskedArrays." @@ -76,15 +77,14 @@ "Tests the creation of a series from a datearray" _, dates, _ = self.d data = dates - + # series = time_series(data, dates) assert(isinstance(series, TimeSeries)) assert_equal(series._dates, dates) assert_equal(series._data, data) assert_equal(series.freqstr, 'D') - + # series[5] = MA.masked - # ensure that series can be represented by a string after masking a value # (there was a bug before that prevented this from working when using a # DateArray for the data) @@ -99,7 +99,7 @@ assert_equal(series._data.size, 15) def test_unsorted(self): - "Tests that the data are porperly sorted along the dates." + "Tests that the data are properly sorted along the dates." 
dlist = ['2007-01-%02i' % i for i in (3,2,1)] data = [10,20,30] series = time_series(data,dlist) @@ -368,6 +368,19 @@ assert_array_equal(shift_negative, shift_negative_result) assert_array_equal(shift_positive, shift_positive_result) # + def test_split(self): + """Test the split function.""" + ms = time_series(N.arange(62).reshape(31,2), + start_date=Date(freq='d', year=2005, month=7, day=1)) + d1,d2 = split(ms) + assert_array_equal(d1.data, ms.data[:,0]) + assert_array_equal(d1.dates, ms.dates) + assert_array_equal(d2.data, ms.data[:,1]) + + series = self.d[0] + ss = split(series)[0] + assert_array_equal(series, ss) + # def test_convert(self): """Test convert function @@ -379,6 +392,8 @@ start_date=Date(freq='m', year=2005, month=6)) highFreqSeries = time_series(N.arange(100), start_date=Date(freq='b', year=2005, month=6, day=1)) + ndseries = time_series(N.arange(124).reshape(62,2), + start_date=Date(freq='d', year=2005, month=7, day=1)) lowToHigh_start = lowFreqSeries.convert('B', position='START') @@ -411,6 +426,8 @@ (Date(freq='b', year=2005, month=6, day=1) + 99).asfreq('M')) assert_array_equal(lowFreqSeries, lowFreqSeries.convert("M")) + + assert_equal(ndseries.convert('M',sum), [[930,961],[2852,2883]]) # def test_fill_missing_dates(self): """Test fill_missing_dates function""" @@ -451,8 +468,6 @@ inplace=False) result = N.array([0,0,0,0,0,1,1,1,1,1,1,1,0,0,0]) assert_equal(mask._mask, result.repeat(2).reshape(-1,2)) - - # def test_pickling(self): "Tests pickling/unpickling" @@ -578,7 +593,6 @@ - ############################################################################### #------------------------------------------------------------------------------ if __name__ == "__main__": Modified: trunk/Lib/sandbox/timeseries/tmulti.py =================================================================== --- trunk/Lib/sandbox/timeseries/tmulti.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tmulti.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -122,7 +122,8 @@ if isinstance(obj, (MaskedRecords)): self.__dict__.update(_fieldmask=obj._fieldmask, _hardmask=obj._hardmask, - _fill_value=obj._fill_value, + _fill_value=obj._fill_value, + _names = obj.dtype.names ) if isinstance(obj, MultiTimeSeries): self.__dict__.update(observed=obj.observed, @@ -135,7 +136,8 @@ observed=None, _fieldmask = nomask, _hardmask = False, - fill_value = None + fill_value = None, + _names = self.dtype.names ) return @@ -152,27 +154,14 @@ #...................................................... def __getattribute__(self, attr): - return MaskedRecords.__getattribute__(self,attr) -# try: -# # Returns a generic attribute -# return object.__getattribute__(self,attr) -# except AttributeError: -# # OK, so attr must be a field name -# pass -# # Get the list of fields ...... -# _names = self.dtype.names -# _local = self.__dict__ -# _mask = _local['_fieldmask'] -# if attr in _names: -# _data = self._data -# obj = numeric.asarray(_data.__getattribute__(attr)).view(MaskedArray) -# obj._mask = make_mask(_mask.__getattribute__(attr)) -# return obj -# elif attr == '_mask': -# if self.size > 1: -# return _mask.view((bool_, len(self.dtype))).all(1) -# return _mask.view((bool_, len(self.dtype))) -# raise AttributeError,"No attribute '%s' !" 
% attr + getattribute = MaskedRecords.__getattribute__ + _dict = getattribute(self,'__dict__') + if attr in _dict.get('_names',[]): + obj = getattribute(self,attr).view(TimeSeries) + obj._dates = _dict['_dates'] + return obj + return getattribute(self,attr) + def __setattr__(self, attr, val): newattr = attr not in self.__dict__ @@ -526,7 +515,9 @@ mts = MultiTimeSeries(mrec,dates) self_data = [d, m, mrec, dlist, dates, ts, mts] - if 1: + assert(isinstance(mts.f0, TimeSeries)) + + if 0: mts[:2] = 5 assert_equal(mts.f0._data, [5,5,2,3,4]) assert_equal(mts.f1._data, [5,5,2,1,0]) Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-12 02:04:48 UTC (rev 2986) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-12 02:04:59 UTC (rev 2987) @@ -28,7 +28,7 @@ import maskedarray as MA from maskedarray import MaskedArray, MAError, masked, nomask, \ - filled, getmask, getmaskarray, make_mask_none, mask_or, make_mask, \ + filled, getmask, getmaskarray, hsplit, make_mask_none, mask_or, make_mask, \ masked_array import tcore as corelib @@ -49,7 +49,7 @@ 'time_series', 'tsmasked', 'mask_period','mask_inside_period','mask_outside_period','compressed', 'adjust_endpoints','align_series','aligned','convert','group_byperiod', -'pct','tshift','fill_missing_dates', 'stack', 'concatenate_series', +'pct','tshift','fill_missing_dates', 'split', 'stack', 'concatenate_series', 'empty_like', 'day_of_week','day_of_year','day','month','quarter','year', 'hour','minute','second', @@ -505,14 +505,14 @@ it is enabled. Otherwise fill with fill value. """ desc = """\ -timeseries(data = +timeseries( %(data)s, dates = %(time)s, freq = %(freq)s) """ desc_short = """\ -timeseries(data = %(data)s, +timeseries(%(data)s, dates = %(time)s, freq = %(freq)s) """ @@ -717,6 +717,30 @@ result = super(TimeSeries, self).transpose(*axes) result._dates = self._dates return result + + def split(self): + """Split a multiple series into individual columns.""" + if self.ndim == 1: + return [self] + else: + n = self.shape[1] + arr = hsplit(self, n)[0] + return [self.__class__(numpy.squeeze(a), + self._dates, + **_attrib_dict(self)) for a in arr] + + def filled(self, fill_value=None): + """Returns an array of the same class as `_data`, + with masked values filled with `fill_value`. +Subclassing is preserved. + +If `fill_value` is None, uses self.fill_value. + """ + result = self._series.filled(fill_value=fill_value).view(type(self)) + result._dates = self._dates + result.copy_attributes(self) + return result + #...................................................... def copy_attributes(self, oldseries, exclude=[]): "Copies the attributes from oldseries if they are not in the exclude list." @@ -850,6 +874,9 @@ hour = _frommethod('hour') minute = _frommethod('minute') second = _frommethod('second') + +split = _frommethod('split') + # ##### --------------------------------------------------------------------------- #---- ... Additional methods ... @@ -1172,8 +1199,8 @@ return [adjust_endpoints(x, start_date, end_date) for x in series] aligned = align_series #.................................................................... -def convert(series, freq, func='auto', position='END'): - """Converts a series to a frequency. +def _convert1d(series, freq, func='auto', position='END'): + """Converts a series to a frequency. 
Private function called by convert When converting to a lower frequency, func is a function that acts on a 1-d array and returns a scalar or 1-d array. func should handle @@ -1207,7 +1234,8 @@ "Cannot adjust a series with missing or duplicated dates." if position.upper() not in ('END','START'): - raise ValueError("invalid value for position argument: (%s)",str(position)) + raise ValueError("Invalid value for position argument: (%s). "\ + "Should be in ['END','START']," % str(position)) start_date = series._dates[0] @@ -1241,6 +1269,37 @@ newseries.copy_attributes(series) return newseries +def convert(series, freq, func='auto', position='END'): + """Converts a series to a frequency. Private function called by convert + + When converting to a lower frequency, func is a function that acts + on a 1-d array and returns a scalar or 1-d array. func should handle + masked values appropriately. If func is "auto", then an + appropriate function is determined based on the observed attribute + of the series. If func is None, then a 2D array is returned, where each + column represents the values appropriately grouped into the new frequency. + interp and position will be ignored in this case. + + When converting to a higher frequency, position is 'START' or 'END' + and determines where the data point is in each period (eg. if going + from monthly to daily, and position is 'END', then each data point is + placed at the end of the month). + """ + if series.ndim == 1: + obj = _convert1d(series, freq, func, position) + elif series.ndim == 2: + base = _convert1d(series[:,0], freq, func, position) + obj = MA.column_stack([_convert1d(m,freq,func,position)._series + for m in series.split()]).view(type(series)) + obj._dates = base._dates + if func is None or (func,series.observed) == ('auto','UNDEFINED'): + shp = obj.shape + ncols = base.shape[-1] + obj.shape = (shp[0], shp[-1]//ncols, ncols) + obj = numpy.swapaxes(obj,1,2) + return obj + + def group_byperiod(series, freq, position='END'): """Converts a series to a frequency, without any processing. If the series has missing data, it is first filled with masked data. Duplicate values in the @@ -1422,7 +1481,7 @@ # return time_series(newdata.reshape(nshp), newdates) #............................................................................... def stack(*series): - """performs a column_stack on the data from each series, and the + """Performs a column_stack on the data from each series, and the resulting series has the same dates as each individual series. All series must be date compatible. 
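The 2-D support in convert and the new split helper above can be exercised as in the following sketch, a condensed version of the tests added in this revision (it assumes the sandbox timeseries and maskedarray packages are installed and importable under those names; sum is the builtin):

    import numpy as N
    from timeseries import time_series, Date, split

    ndseries = time_series(N.arange(124).reshape(62, 2),
                           start_date=Date(freq='d', year=2005, month=7, day=1))
    # split() returns one 1-D series per column, all sharing the same dates.
    d1, d2 = split(ndseries)
    # Converting the 2-D series keeps one column per original column; per the
    # new test, the monthly sums come out as [[930, 961], [2852, 2883]].
    monthly = ndseries.convert('M', sum)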
@@ -1513,4 +1572,24 @@ # ensure that series can be represented by a string after masking a value # (there was a bug before that prevented this from working when using a # DateArray for the data) - strrep = str(series) \ No newline at end of file + strrep = str(series) + + if 0: + series = time_series(numpy.arange(1,501), + start_date=Date('D', string='2007-01-01')) + mseries = convert(series, 'M') + aseries = convert(mseries, 'A') + (freq, func, position) = ('A', None, 'END') + + tmp = mseries[:,0].convert('A') + aseries = MA.concatenate([_convert1d(m,'A')._series for m in mseries.split()], + axis=-1).view(type(series)) + aseries._dates = tmp._dates + shp = aseries.shape + aseries.shape = (shp[0], shp[-1]//tmp.shape[-1], tmp.shape[-1]) + numpy.swapaxes(aseries,1,2) + + if 1: + series = time_series(N.arange(124).reshape(62,2), + start_date=Date(freq='d', year=2005, month=7, day=1)) + assert_equal(series.convert('M',sum), [[930,961],[2852,2883]]) \ No newline at end of file From scipy-svn at scipy.org Sun May 13 12:32:25 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 11:32:25 -0500 (CDT) Subject: [Scipy-svn] r2988 - in trunk/Lib/sandbox/maskedarray: . tests Message-ID: <20070513163225.5E5A939C0D8@new.scipy.org> Author: pierregm Date: 2007-05-13 11:32:22 -0500 (Sun, 13 May 2007) New Revision: 2988 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/tests/test_core.py Log: maskedarray.core : fixed the .reshape method maskedarray.core : fixed compressed when _smallmask is False maskedarray.core : fixed ravel when _smallmask is False Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-12 02:04:59 UTC (rev 2987) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-13 16:32:22 UTC (rev 2988) @@ -913,6 +913,7 @@ mask = self.obj._mask cls = type(self.obj) result = getattr(data, methodname)(*args, **params).view(cls) + result._smallmask = self.obj._smallmask if result.ndim: if not self._onmask: result._mask = mask @@ -1115,6 +1116,7 @@ if hasattr(dout, 'shape') and len(dout.shape) > 0: # Not a scalar: make sure that dout is a MA dout = dout.view(type(self)) + dout._smallmask = self._smallmask if m is not nomask: # use _set_mask to take care of the shape dout.__setmask__(m[indx]) @@ -1179,8 +1181,8 @@ If `value` is masked, masks those locations.""" self.__setitem__(slice(i,j), value) #............................................ - def __setmask__(self, mask): - newmask = make_mask(mask, copy=False, small_mask=self._smallmask) + def __setmask__(self, mask, copy=False): + newmask = make_mask(mask, copy=copy, small_mask=self._smallmask) # self.unshare_mask() if self._mask is nomask: self._mask = newmask @@ -1292,6 +1294,8 @@ d = self.ravel() if self._mask is nomask: return d + elif not self._smallmask and not self._mask.any(): + return d else: return d[numeric.logical_not(d._mask)] #............................................ @@ -1440,10 +1444,11 @@ """Reshapes the array to shape s. Returns a new masked array. If you want to modify the shape in place, please use `a.shape = s`""" - # TODO: Do we keep super, or reshape _data and take a view ? 
- result = super(MaskedArray, self).reshape(*s) - if self._mask is not nomask: - result._mask = self._mask.reshape(*s) + result = self._data.reshape(*s).view(type(self)) + result.__dict__.update(self.__dict__) + if result._mask is not nomask: + result._mask = self._mask.copy() + result._mask.shape = result.shape return result # repeat = _arraymethod('repeat') @@ -2634,4 +2639,10 @@ if __name__ == '__main__': if 1: x = arange(10) - assert(x.ctypes.data == x.filled().ctypes.data) \ No newline at end of file + assert(x.ctypes.data == x.filled().ctypes.data) + if 1: + a = array([1,2,3,4],mask=[0,0,0,0],small_mask=False) + assert(a.ravel()._mask, [0,0,0,0]) + assert(a.compressed(), a) + a[0] = masked + assert(a.compressed()._mask, [0,0,0]) \ No newline at end of file Modified: trunk/Lib/sandbox/maskedarray/tests/test_core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/tests/test_core.py 2007-05-12 02:04:59 UTC (rev 2987) +++ trunk/Lib/sandbox/maskedarray/tests/test_core.py 2007-05-13 16:32:22 UTC (rev 2988) @@ -1178,7 +1178,35 @@ aravel = a.ravel() assert_equal(a.shape,(1,5)) assert_equal(a._mask.shape, a.shape) + # Checs that small_mask is preserved + a = array([1,2,3,4],mask=[0,0,0,0],small_mask=False) + assert_equal(a.ravel()._mask, [0,0,0,0]) + def check_reshape(self): + "Tests reshape" + x = arange(4) + x[0] = masked + y = x.reshape(2,2) + assert_equal(y.shape, (2,2,)) + assert_equal(y._mask.shape, (2,2,)) + assert_equal(x.shape, (4,)) + assert_equal(x._mask.shape, (4,)) + + def check_compressed(self): + "Tests compressed" + a = array([1,2,3,4],mask=[0,0,0,0],small_mask=False) + b = a.compressed() + assert_equal(b, a) + assert_equal(b._mask, a._mask) + a[0] = masked + b = a.compressed() + assert_equal(b._data, [2,3,4]) + assert_equal(b._mask, [0,0,0]) + a._smallmask = True + b = a.compressed() + assert_equal(b._data, [2,3,4]) + assert_equal(b._mask, nomask) + #.............................................................................. ############################################################################### From scipy-svn at scipy.org Sun May 13 12:46:53 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 11:46:53 -0500 (CDT) Subject: [Scipy-svn] r2989 - in trunk/Lib/optimize: . tests Message-ID: <20070513164653.6C07739C0D8@new.scipy.org> Author: ondrej Date: 2007-05-13 11:46:21 -0500 (Sun, 13 May 2007) New Revision: 2989 Added: trunk/Lib/optimize/nonlin.py trunk/Lib/optimize/tests/test_nonlin.py Modified: trunk/Lib/optimize/__init__.py Log: Patch implementing nonlinear solvers, see the ticket 402. Modified: trunk/Lib/optimize/__init__.py =================================================================== --- trunk/Lib/optimize/__init__.py 2007-05-13 16:32:22 UTC (rev 2988) +++ trunk/Lib/optimize/__init__.py 2007-05-13 16:46:21 UTC (rev 2989) @@ -11,6 +11,7 @@ from lbfgsb import fmin_l_bfgs_b from tnc import fmin_tnc from cobyla import fmin_cobyla +import nonlin __all__ = filter(lambda s:not s.startswith('_'),dir()) from numpy.testing import NumpyTest Added: trunk/Lib/optimize/nonlin.py =================================================================== --- trunk/Lib/optimize/nonlin.py 2007-05-13 16:32:22 UTC (rev 2988) +++ trunk/Lib/optimize/nonlin.py 2007-05-13 16:46:21 UTC (rev 2989) @@ -0,0 +1,466 @@ +""" +Nonlinear solvers +================= + +These solvers find x for which F(x)=0. Both x and F is multidimensional. 
+ +They accept the user defined function F, which accepts a python tuple x and it +should return F(x), which can be either a tuple, or numpy array. + +Example: + +def F(x): + "Should converge to x=[0,0,0,0,0]" + d=numpy.array([3,2,1.5,1,0.5]) + c=0.01 + return -d*numpy.array(x)-c*numpy.array(x)**3 + +x= solvers.broyden2(F,[1,1,1,1,1]) + +All solvers have the parameter iter (the number of iterations to compute), some +of them have other parameters of the solver, see the particular solver for +details. + + A collection of general-purpose nonlinear multidimensional solvers. + + broyden1 -- Broyden's first method - is a quasi-Newton-Raphson + method for updating an approximate Jacobian and then + inverting it + broyden2 -- Broyden's second method - the same as broyden1, but + updates the inverse Jacobian directly + broyden3 -- Broyden's second method - the same as broyden2, but + instead of directly computing the inverse Jacobian, + it remembers how to construct it using vectors, and + when computing inv(J)*F, it uses those vectors to + compute this product, thus avoding the expensive NxN + matrix multiplication. + broyden_generalized -- Generalized Broyden's method, the same as broyden2, + but instead of approximating the full NxN Jacobian, + it construct it at every iteration in a way that + avoids the NxN matrix multiplication. This is not + as precise as broyden3. + anderson -- extended Anderson method, the same as the + broyden_generalized, but added w_0^2*I to before + taking inversion to improve the stability + anderson2 -- the Anderson method, the same as anderson, but + formulated differently + + The broyden2 is the best. For large systems, use broyden3. excitingmixing is + also very effective. There are some more solvers implemented (see their + docstrings), however, those are of mediocre quality. + + + Utility Functions + + norm -- Returns an L2 norm of the vector + +""" + +import math + +import numpy + +def mlog(x): + if x==0.: + return 13 + else: + return math.log(x) + +def norm(v): + """Returns an L2 norm of the vector.""" + return math.sqrt(numpy.sum((numpy.array(v)**2).flat)) + +def myF(F,xm): + return numpy.matrix(F(tuple(xm.flat))).T + +def difference(a,b): + m=0. + for x,y in zip(a,b): + m+=(x-y)**2 + return math.sqrt(m) + +def sum(a,b): + return [ai+bi for ai,bi in zip(a,b)] + +def mul(C,b): + return [C*bi for bi in b] + +def solve(A,b): + """Solve Ax=b, returns x""" + try: + from scipy import linalg + return linalg.solve(A,b) + except: + return A.I*b + +def broyden2(F, xin, iter=10, alpha=0.4, verbose = False): + """Broyden's second method. + + Updates inverse Jacobian by an optimal formula. + There is NxN matrix multiplication in every iteration. + + The best norm |F(x)|=0.003 achieved in ~20 iterations. + + Recommended. + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + Gm=-alpha*numpy.matrix(numpy.identity(len(xin))) + for n in range(iter): + deltaxm=-Gm*Fxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + Gm=Gm+(deltaxm-Gm*deltaFxm)*deltaFxm.T/norm(deltaFxm)**2 + if verbose: + print "%d: |F(x)|=%.3f"%(n+1, norm(Fxm)) + return xm.flat + +def broyden3(F, xin, iter=10, alpha=0.4, verbose = False): + """Broyden's second method. + + Updates inverse Jacobian by an optimal formula. + The NxN matrix multiplication is avoided. + + The best norm |F(x)|=0.003 achieved in ~20 iterations. + + Recommended. + """ + zy=[] + def updateG(z,y): + "G:=G+z*y.T" + zy.append((z,y)) + def Gmul(f): + "G=-alpha*1+z*y.T+z*y.T ..." 
+ s=-alpha*f + for z,y in zy: + s=s+z*(y.T*f) + return s + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) +# Gm=-alpha*numpy.matrix(numpy.identity(len(xin))) + for n in range(iter): + #deltaxm=-Gm*Fxm + deltaxm=Gmul(-Fxm) + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + #Gm=Gm+(deltaxm-Gm*deltaFxm)*deltaFxm.T/norm(deltaFxm)**2 + updateG(deltaxm-Gmul(deltaFxm),deltaFxm/norm(deltaFxm)**2) + if verbose: + print "%d: |F(x)|=%.3f"%(n+1, norm(Fxm)) + return xm.flat + +def broyden_generalized(F, xin, iter=10, alpha=0.1, M=5, verbose = False): + """Generalized Broyden's method. + + Computes an approximation to the inverse Jacobian from the last M + interations. Avoids NxN matrix multiplication, it only has MxM matrix + multiplication and inversion. + + M=0 .... linear mixing + M=1 .... Anderson mixing with 2 iterations + M=2 .... Anderson mixing with 3 iterations + etc. + optimal is M=5 + + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + G0=-alpha + dxm=[] + dFxm=[] + for n in range(iter): + deltaxm=-G0*Fxm + if M>0: + MM=min(M,n) + for m in range(n-MM,n): + deltaxm=deltaxm-(float(gamma[m-(n-MM)])*dxm[m]-G0*dFxm[m]) + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + + if M>0: + dxm.append(deltaxm) + dFxm.append(deltaFxm) + MM=min(M,n+1) + a=numpy.matrix(numpy.empty((MM,MM))) + for i in range(n+1-MM,n+1): + for j in range(n+1-MM,n+1): + a[i-(n+1-MM),j-(n+1-MM)]=dFxm[i].T*dFxm[j] + + dFF=numpy.matrix(numpy.empty(MM)).T + for k in range(n+1-MM,n+1): + dFF[k-(n+1-MM)]=dFxm[k].T*Fxm + gamma=a.I*dFF + + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm.flat + +def anderson(F, xin, iter=10, alpha=0.1, M=5, w0=0.01, verbose = False): + """Extended Anderson method. + + Computes an approximation to the inverse Jacobian from the last M + interations. Avoids NxN matrix multiplication, it only has MxM matrix + multiplication and inversion. + + M=0 .... linear mixing + M=1 .... Anderson mixing with 2 iterations + M=2 .... Anderson mixing with 3 iterations + etc. + optimal is M=5 + + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + dxm=[] + dFxm=[] + for n in range(iter): + deltaxm=alpha*Fxm + if M>0: + MM=min(M,n) + for m in range(n-MM,n): + deltaxm=deltaxm-(float(gamma[m-(n-MM)])*dxm[m]+alpha*dFxm[m]) + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + + if M>0: + dxm.append(deltaxm) + dFxm.append(deltaFxm) + MM=min(M,n+1) + a=numpy.matrix(numpy.empty((MM,MM))) + for i in range(n+1-MM,n+1): + for j in range(n+1-MM,n+1): + if i==j: wd=w0**2 + else: wd=0 + a[i-(n+1-MM),j-(n+1-MM)]=(1+wd)*dFxm[i].T*dFxm[j] + + dFF=numpy.matrix(numpy.empty(MM)).T + for k in range(n+1-MM,n+1): + dFF[k-(n+1-MM)]=dFxm[k].T*Fxm + gamma=solve(a,dFF) +# print gamma + + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm.flat + +def anderson2(F, xin, iter=10, alpha=0.1, M=5, w0=0.01, verbose = False): + """Anderson method. + + M=0 .... linear mixing + M=1 .... Anderson mixing with 2 iterations + M=2 .... Anderson mixing with 3 iterations + etc. 
+ optimal is M=5 + + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + dFxm=[] + for n in range(iter): + deltaxm=Fxm + if M>0: + MM=min(M,n) + for m in range(n-MM,n): + deltaxm=deltaxm+float(theta[m-(n-MM)])*(dFxm[m]-Fxm) + deltaxm=deltaxm*alpha + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + + if M>0: + dFxm.append(Fxm-deltaFxm) + MM=min(M,n+1) + a=numpy.matrix(numpy.empty((MM,MM))) + for i in range(n+1-MM,n+1): + for j in range(n+1-MM,n+1): + if i==j: wd=w0**2 + else: wd=0 + a[i-(n+1-MM),j-(n+1-MM)]= \ + (1+wd)*(Fxm-dFxm[i]).T*(Fxm-dFxm[j]) + + dFF=numpy.matrix(numpy.empty(MM)).T + for k in range(n+1-MM,n+1): + dFF[k-(n+1-MM)]=(Fxm-dFxm[k]).T*Fxm + theta=solve(a,dFF) +# print gamma + + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm.flat + +def broyden_modified(F, xin, iter=10, alpha=0.35, w0=0.01, wl=5, verbose = False): + """Modified Broyden's method. + + Updates inverse Jacobian using information from all the iterations and + avoiding the NxN matrix multiplication. The problem is with the weights, + it converges the same or worse than broyden2 or broyden_generalized + + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + G0=alpha + w=[] + u=[] + dFxm=[] + for n in range(iter): + deltaxm=G0*Fxm + for i in range(n): + for j in range(n): + deltaxm-=w[i]*w[j]*betta[i,j]*u[j]*(dFxm[i].T*Fxm) + xm+=deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + + w.append(wl/norm(Fxm)) + + u.append((G0*deltaFxm+deltaxm)/norm(deltaFxm)) + dFxm.append(deltaFxm/norm(deltaFxm)) + a=numpy.matrix(numpy.empty((n+1,n+1))) + for i in range(n+1): + for j in range(n+1): + a[i,j]=w[i]*w[j]*dFxm[j].T*dFxm[i] + betta=(w0**2*numpy.matrix(numpy.identity(n+1))+a).I + + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm.flat + +def broyden1(F, xin, iter=10, alpha=0.1, verbose = False): + """Broyden's first method. + + Updates Jacobian and computes inv(J) by a matrix inversion at every + iteration. It's very slow. + + The best norm |F(x)|=0.005 achieved in ~45 iterations. + """ + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + Jm=-1/alpha*numpy.matrix(numpy.identity(len(xin))) + + for n in range(iter): + deltaxm=solve(-Jm,Fxm) + #!!!! What the fuck?! + #xm+=deltaxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + Jm=Jm+(deltaFxm-Jm*deltaxm)*deltaxm.T/norm(deltaxm)**2 + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm.flat + +def broyden1_modified(F, xin, iter=10, alpha=0.1, verbose = False): + """Broyden's first method, modified by O. Certik. + + Updates inverse Jacobian using some matrix identities at every iteration, + its faster then newton_slow, but still not optimal. + + The best norm |F(x)|=0.005 achieved in ~45 iterations. + """ + def inv(A,u,v): + + #interesting is that this + #return (A.I+u*v.T).I + #is more stable than + #return A-A*u*v.T*A/float(1+v.T*A*u) + Au=A*u + return A-Au*(v.T*A)/float(1+v.T*Au) + xm=numpy.matrix(xin).T + Fxm=myF(F,xm) + Jm=alpha*numpy.matrix(numpy.identity(len(xin))) + for n in range(iter): + deltaxm=Jm*Fxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 +# print "-------------",norm(deltaFxm),norm(deltaxm) + deltaFxm/=norm(deltaxm) + deltaxm/=norm(deltaxm) + Jm=inv(Jm+deltaxm*deltaxm.T*Jm,-deltaFxm,deltaxm) + + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm + +def vackar(F, xin, iter=10, alpha=0.1, verbose = False): + """J=diag(d1,d2,...,dN) + + The best norm |F(x)|=0.005 achieved in ~110 iterations. 
+ """ + def myF(F,xm): + return numpy.array(F(tuple(xm.flat))).T + xm=numpy.array(xin) + Fxm=myF(F,xm) + d=1/alpha*numpy.ones(len(xin)) + Jm=numpy.matrix(numpy.diag(d)) + + for n in range(iter): + deltaxm=1/d*Fxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + d=d-(deltaFxm+d*deltaxm)*deltaxm/norm(deltaxm)**2 + if verbose: + print "%d: |F(x)|=%.3f"%(n, norm(Fxm)) + return xm + +def linearmixing(F,xin, iter=10, alpha=0.1, verbose = False): + """J=-1/alpha + + The best norm |F(x)|=0.005 achieved in ~140 iterations. + """ + def myF(F,xm): + return numpy.array(F(tuple(xm.flat))).T + xm=numpy.array(xin) + Fxm=myF(F,xm) + for n in range(iter): + deltaxm=alpha*Fxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + Fxm=Fxm1 + if verbose: + print "%d: |F(x)|=%.3f" %(n,norm(Fxm)) + + return xm + +def excitingmixing(F,xin,iter=10,alpha=0.1,alphamax=1.0, verbose = False): + """J=-1/alpha + + The best norm |F(x)|=0.005 achieved in ~140 iterations. + """ + def myF(F,xm): + return numpy.array(F(tuple(xm.flat))).T + xm=numpy.array(xin) + beta=numpy.array([alpha]*len(xm)) + Fxm=myF(F,xm) + for n in range(iter): + deltaxm=beta*Fxm + xm=xm+deltaxm + Fxm1=myF(F,xm) + deltaFxm=Fxm1-Fxm + for i in range(len(xm)): + if Fxm1[i]*Fxm[i] > 0: + beta[i]=beta[i]+alpha + if beta[i] > alphamax: + beta[i] = alphamax + else: + beta[i]=alpha + Fxm=Fxm1 + if verbose: + print "%d: |F(x)|=%.3f" %(n,norm(Fxm)) + + return xm Added: trunk/Lib/optimize/tests/test_nonlin.py =================================================================== --- trunk/Lib/optimize/tests/test_nonlin.py 2007-05-13 16:32:22 UTC (rev 2988) +++ trunk/Lib/optimize/tests/test_nonlin.py 2007-05-13 16:46:21 UTC (rev 2989) @@ -0,0 +1,95 @@ +""" Unit tests for nonlinear solvers +Author: Ondrej Certik +May 2007 +""" + +from numpy.testing import * + +set_package_path() +from scipy.optimize import nonlin +from numpy import matrix, diag +restore_path() + +def F(x): + def p3(y): + return float(y.T*y)*y + try: + x=tuple(x.flat) + except: + pass + x=matrix(x).T + + d=matrix(diag([3,2,1.5,1,0.5])) + c=0.01 + f=-d*x-c*p3(x) + + return tuple(f.flat) + +class test_nonlin(NumpyTestCase): + """ Test case for a simple constrained entropy maximization problem + (the machine translation example of Berger et al in + Computational Linguistics, vol 22, num 1, pp 39--72, 1996.) 
+ """ + def setUp(self): + self.xin=[1,1,1,1,1] + + + def test_linearmixing(self): + x = nonlin.linearmixing(F,self.xin,iter=60,alpha=0.5) + assert nonlin.norm(x)<1e-7 + assert nonlin.norm(F(x))<1e-7 + + def test_broyden1(self): + x= nonlin.broyden1(F,self.xin,iter=11,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + def test_broyden2(self): + x= nonlin.broyden2(F,self.xin,iter=12,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + def test_broyden3(self): + x= nonlin.broyden3(F,self.xin,iter=12,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + def test_exciting(self): + x= nonlin.excitingmixing(F,self.xin,iter=20,alpha=0.5) + assert nonlin.norm(x)<1e-5 + assert nonlin.norm(F(x))<1e-5 + + def test_anderson(self): + x= nonlin.anderson(F,self.xin,iter=12,alpha=0.03,M=5) + assert nonlin.norm(x)<0.33 + + def test_anderson2(self): + x= nonlin.anderson2(F,self.xin,iter=12,alpha=0.6,M=5) + assert nonlin.norm(x)<0.2 + + def test_broydengeneralized(self): + x= nonlin.broyden_generalized(F,self.xin,iter=60,alpha=0.5,M=0) + assert nonlin.norm(x)<1e-7 + assert nonlin.norm(F(x))<1e-7 + x= nonlin.broyden_generalized(F,self.xin,iter=61,alpha=0.1,M=1) + assert nonlin.norm(x)<2e-4 + assert nonlin.norm(F(x))<2e-4 + + def xtest_broydenmodified(self): + x= nonlin.broyden_modified(F,self.xin,iter=12,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + def test_broyden1modified(self): + x= nonlin.broyden1_modified(F,self.xin,iter=35,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + def test_vackar(self): + x= nonlin.vackar(F,self.xin,iter=11,alpha=1) + assert nonlin.norm(x)<1e-9 + assert nonlin.norm(F(x))<1e-9 + + +if __name__ == "__main__": + NumpyTest().run() From scipy-svn at scipy.org Sun May 13 13:05:13 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 12:05:13 -0500 (CDT) Subject: [Scipy-svn] r2990 - trunk/Lib/optimize Message-ID: <20070513170513.11F9839C0D8@new.scipy.org> Author: ondrej Date: 2007-05-13 12:05:11 -0500 (Sun, 13 May 2007) New Revision: 2990 Modified: trunk/Lib/optimize/info.py Log: update the documentation Modified: trunk/Lib/optimize/info.py =================================================================== --- trunk/Lib/optimize/info.py 2007-05-13 16:46:21 UTC (rev 2989) +++ trunk/Lib/optimize/info.py 2007-05-13 17:05:11 UTC (rev 2990) @@ -59,7 +59,30 @@ fixed_point -- Single-variable fixed-point solver. + A collection of general-purpose nonlinear multidimensional solvers. + broyden1 -- Broyden's first method - is a quasi-Newton-Raphson + method for updating an approximate Jacobian and then + inverting it + broyden2 -- Broyden's second method - the same as broyden1, but + updates the inverse Jacobian directly + broyden3 -- Broyden's second method - the same as broyden2, but + instead of directly computing the inverse Jacobian, + it remembers how to construct it using vectors, and + when computing inv(J)*F, it uses those vectors to + compute this product, thus avoding the expensive NxN + matrix multiplication. + broyden_generalized -- Generalized Broyden's method, the same as broyden2, + but instead of approximating the full NxN Jacobian, + it construct it at every iteration in a way that + avoids the NxN matrix multiplication. This is not + as precise as broyden3. 
+ anderson -- extended Anderson method, the same as the + broyden_generalized, but added w_0^2*I to before + taking inversion to improve the stability + anderson2 -- the Anderson method, the same as anderson, but + formulated differently + Utility Functions line_search -- Return a step that satisfies the strong Wolfe conditions. From scipy-svn at scipy.org Sun May 13 13:16:00 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 12:16:00 -0500 (CDT) Subject: [Scipy-svn] r2991 - trunk/Lib/optimize Message-ID: <20070513171600.569EF39C0D8@new.scipy.org> Author: ondrej Date: 2007-05-13 12:15:57 -0500 (Sun, 13 May 2007) New Revision: 2991 Modified: trunk/Lib/optimize/nonlin.py Log: updated documentation in nonlin Modified: trunk/Lib/optimize/nonlin.py =================================================================== --- trunk/Lib/optimize/nonlin.py 2007-05-13 17:05:11 UTC (rev 2990) +++ trunk/Lib/optimize/nonlin.py 2007-05-13 17:15:57 UTC (rev 2991) @@ -11,11 +11,13 @@ def F(x): "Should converge to x=[0,0,0,0,0]" - d=numpy.array([3,2,1.5,1,0.5]) - c=0.01 + import numpy + d = numpy.array([3,2,1.5,1,0.5]) + c = 0.01 return -d*numpy.array(x)-c*numpy.array(x)**3 -x= solvers.broyden2(F,[1,1,1,1,1]) +from scipy.optimize import nonlin +x = nonlin.broyden2(F,[1,1,1,1,1]) All solvers have the parameter iter (the number of iterations to compute), some of them have other parameters of the solver, see the particular solver for From scipy-svn at scipy.org Sun May 13 14:11:59 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 13:11:59 -0500 (CDT) Subject: [Scipy-svn] r2992 - in trunk: . Lib/interpolate Message-ID: <20070513181159.2512A39C0F9@new.scipy.org> Author: oliphant Date: 2007-05-13 13:11:55 -0500 (Sun, 13 May 2007) New Revision: 2992 Modified: trunk/Lib/interpolate/interpolate.py trunk/THANKS.txt Log: Add some support for csplines using various boundary conditions. Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-13 17:15:57 UTC (rev 2991) +++ trunk/Lib/interpolate/interpolate.py 2007-05-13 18:11:55 UTC (rev 2992) @@ -4,14 +4,14 @@ # !! Need to find argument for keeping initialize. If it isn't # !! found, get rid of it! -__all__ = ['interp1d', 'interp2d'] +__all__ = ['interp1d', 'interp2d', 'cspline', 'cspeval', 'csprep'] from numpy import shape, sometrue, rank, array, transpose, \ swapaxes, searchsorted, clip, take, ones, putmask, less, greater, \ logical_or, atleast_1d, atleast_2d, meshgrid, ravel import numpy as np -import fitpack +#import fitpack def reduce_sometrue(a): all = a @@ -295,3 +295,159 @@ out_of_bounds = logical_or(below_bounds, above_bounds) return out_of_bounds + +def _get_cspline_Bb(yk, xk, kind, conds): + # internal function to compute different tri-diagonal system + # depending on the kind of spline requested. 
+ # conds is only used for 'second' and 'first' + Np1 = len(xk) + if kind in ['natural', 'second']: + if kind == 'natural': + m0, mN = 0.0, 0.0 + else: + m0, mN = conds + + # the matrix to invert is (N-1,N-1) + beta = 2*(xk[2:]-xk[:-2]) + alpha = xk[1:]-xk[:-1] + B = np.diag(alpha[1:-1],k=-1) + np.diag(beta) + np.diag(alpha[2:],k=1) + dyk = yk[1:]-yk[:-1] + b = (dyk[1:]/alpha[1:] - dyk[:-1]/alpha[:-1]) + b *= 6 + b[0] -= m0 + b[-1] -= mN + + # put m0 and mN into the correct shape for + # concatenation + m0 = array(m0,copy=0,ndmin=yk.ndim) + mN = array(mN,copy=0,ndmin=yk.ndim) + if m0.shape[1:] != yk.shape[1:]: + m0 = m0*(ones(yk.shape[1:])[newaxis,...]) + if mN.shape[1:] != yk.shape[1:]: + mN = mN*(ones(yk.shape[1:])[newaxis,...]) + + return B, b, m0, mN + + + elif kind in ['clamped', 'endslope', 'first', 'not-a-knot', 'runout', + 'parabolic']: + if kind == 'endslope': + # match slope of lagrange interpolating polynomial of + # order 3 at end-points. + x0,x1,x2,x3 = xk[:4] + sl_0 = (1./(x0-x1)+1./(x0-x2)+1./(x0-x3))*yk[0] + sl_0 += (x0-x2)*(x0-x3)/((x1-x0)*(x1-x2)*(x1-x3))*yk[1] + sl_0 += (x0-x1)*(x0-x3)/((x2-x0)*(x2-x1)*(x3-x2))*yk[2] + sl_0 += (x0-x1)*(x0-x2)/((x3-x0)*(x3-x1)*(x3-x2))*yk[3] + + xN3,xN2,xN1,xN0 = xk[-4:] + sl_N = (1./(xN0-xN1)+1./(xN0-xN2)+1./(xN0-xN3))*yk[-1] + sl_N += (xN0-xN2)*(xN0-xN3)/((xN1-xN0)*(xN1-xN2)*(xN1-xN3))*yk[-2] + sl_N += (xN0-xN1)*(xN0-xN3)/((xN2-xN0)*(xN2-xN1)*(xN3-xN2))*yk[-3] + sl_N += (xN0-xN1)*(xN0-xN2)/((xN3-xN0)*(xN3-xN1)*(xN3-xN2))*yk[-4] + elif kind == 'clamped': + sl_0,sl_N = 0.0, 0.0 + elif kind == 'first': + sl_0, sl_N = conds + + # Now set up the (N+1)x(N+1) system of equations + beta = np.r_[0,2*(xk[2:]-xk[:-2]),0] + alpha = xk[1:]-xk[:-1] + gamma = np.r_[0,alpha[1:]] + B = np.diag(alpha,k=-1) + np.diag(beta) + np.diag(gamma,k=1) + d1 = alpha[0] + dN = alpha[-1] + if kind == 'not-a-knot': + d2 = alpha[1] + dN1 = alpha[-2] + B[0,:3] = [d2,-d1-d2,d1] + B[-1,-3:] = [dN,-dN1-dN,dN1] + elif kind == 'runout': + B[0,:3] = [1,-2,1] + b[-1,-3:] = [1,-2,1] + elif kind == 'parabolic': + B[0,:2] = [1,-1] + B[-1,-2:] = [-1,1] + elif kind == 'periodic': + raise NotImplementedError + elif kind == 'symmetric': + raise NotImplementedError + else: + B[0,:2] = [2*d1,d1] + B[-1,-2:] = [dN,2*dN] + + b = np.empty((Np1,)*yk.shape[1:]) + dyk = (yk[1:]-yk[:-1])*1.0 + if kind in ['not-a-knot', 'runout', 'parabolic']: + b[0] = b[-1] = 0.0 + elif kind == 'periodic': + raise NotImplementedError + elif kind == 'symmetric': + raise NotImplementedError + else: + b[0] = (dyk[0]/d1 - sl_0) + b[-1] = (dyk[-1]/dN - sl_N) + b[1:-1,...] = (dyk[1:]/alpha[1:]-dyk[:-1]/alpha[:-1]) + b *= 6.0 + return B, b, None, None + else: + raise ValueError, "%s not supported" % kind + + +def cspeval((mk,xk,yk),xnew): + """Evaluate a cubic-spline representation of the points (xk,yk) + at the new values xnew. The mk values are the second derivatives at xk + The xk vector must be sorted. + + More than one curve can be represented using 2-d arrays for mk and yk. + However, the last dimension must have the same shape as the 1-d array xk. + The first-dimension will be considered the interpolating dimension. + """ + indxs = np.searchsorted(xk, xnew) + indxsm1 = indxs-1 + xkm1 = xk[indxsm1] + xkvals = xk[indxs] + dm1 = xnew - xkm1 + d = xkvals - xnew + mk0 = mk[indxs] + mkm1 = mk[indxsm1] + dk = xkvals-xkm1 + val = (mk0*dm1**3. 
+ mkm1*d**3.)/(6*dk) + val += (yk[indxsm1]/dk - mkm1*dk/6.)*d + val += (yk[indxs]/dk - mk0*dk/6.)*dm1 + return val + +def csprep(yk,xk=None,kind='not-a-knot',conds=None): + """Return a (Spp,xk,yk) representation of a cubic spline given data-points + + yk can be a 2-d array to represent more than one curve, through + the same xk points. The first dimension is assumed to be the + interpolating dimenion. + + If xk is None, then x=arange(len(yk)) will be assumed + + kind can be 'natural', 'second', 'clamped', 'endslope', 'periodic', + 'symmetric', 'parabolic', 'not-a-knot', 'runout' + + for 'second', and 'clamped' conditions can be given which should + be the desired second and first derivatives at + the end-points, respectively. + """ + yk = np.asanyarray(yk) + N = yk.shape[0]-1 + if xk is None: + xk = arange(N+1,dtype=float) + factor = 1.0 + else: + deltax = (xk[-1]-xk[0])/float(N) + factor = deltax**2 + B,b,first,last = _get_cspline_Bb(yk, xk, kind, conds) + mk = np.dual.solve(B,b) + if first is not None: + mk = np.concatenate((first, mk), axis=0) + if last is not None: + mk = np.concatenate((mk, last), axis=0) + return mk/factor, xk, yk + +def cspline(yk,xk,xnew,kind='not-a-knot',conds=None): + return cspeval(csprep(yk,xk,kind=kind,conds=conds),xnew) Modified: trunk/THANKS.txt =================================================================== --- trunk/THANKS.txt 2007-05-13 17:15:57 UTC (rev 2991) +++ trunk/THANKS.txt 2007-05-13 18:11:55 UTC (rev 2992) @@ -11,14 +11,16 @@ Jones. Travis and Eric each contributed about half the orginal code. Pearu developed f2py, which is the integral to wrapping the many Fortran libraries used in SciPy. All three continually work on both algorithm development and -improvements to the feature set, build process, and robustness of SciPy. +improvements to the feature set, build process, and robustness of SciPy. +Please add names as needed so that we can keep up with all the contributors. Nathan Bell -- sparsetools reimplementation (CSR/CSC matrix operations) Robert Cimrman -- UMFpack wrapper for sparse matrix module David M. 
Cooke -- improvements to system_info, and LBFGSB wrapper Chuck Harris -- Zeros package in optimize (1d root-finding algorithms) Prabhu Ramachandran -- improvements to gui_thread +Robert Kern -- improvements to stats and bug-fixes Jean-Sebastien Roy -- fmin_tnc code which he adapted from Stephen Nash's original Fortran Ed Schofield -- Maximum entropy and Monte Carlo modules, help with From scipy-svn at scipy.org Sun May 13 15:49:54 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 14:49:54 -0500 (CDT) Subject: [Scipy-svn] r2993 - trunk/Lib/optimize Message-ID: <20070513194954.6021339C134@new.scipy.org> Author: ondrej Date: 2007-05-13 14:49:50 -0500 (Sun, 13 May 2007) New Revision: 2993 Modified: trunk/Lib/optimize/__init__.py Log: import broyden methods by default in optimize Modified: trunk/Lib/optimize/__init__.py =================================================================== --- trunk/Lib/optimize/__init__.py 2007-05-13 18:11:55 UTC (rev 2992) +++ trunk/Lib/optimize/__init__.py 2007-05-13 19:49:50 UTC (rev 2993) @@ -11,7 +11,8 @@ from lbfgsb import fmin_l_bfgs_b from tnc import fmin_tnc from cobyla import fmin_cobyla -import nonlin +from nonlin import broyden1, broyden2, broyden3, broyden_generalized, \ + anderson, anderson2 __all__ = filter(lambda s:not s.startswith('_'),dir()) from numpy.testing import NumpyTest From scipy-svn at scipy.org Sun May 13 15:54:35 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 13 May 2007 14:54:35 -0500 (CDT) Subject: [Scipy-svn] r2994 - trunk/Lib/optimize Message-ID: <20070513195435.E6E8039C134@new.scipy.org> Author: ondrej Date: 2007-05-13 14:54:33 -0500 (Sun, 13 May 2007) New Revision: 2994 Modified: trunk/Lib/optimize/nonlin.py Log: Documentation updated (reflecting that the broyden methods are imported in optimize) Modified: trunk/Lib/optimize/nonlin.py =================================================================== --- trunk/Lib/optimize/nonlin.py 2007-05-13 19:49:50 UTC (rev 2993) +++ trunk/Lib/optimize/nonlin.py 2007-05-13 19:54:33 UTC (rev 2994) @@ -16,8 +16,8 @@ c = 0.01 return -d*numpy.array(x)-c*numpy.array(x)**3 -from scipy.optimize import nonlin -x = nonlin.broyden2(F,[1,1,1,1,1]) +from scipy import optimize +x = optimize.broyden2(F,[1,1,1,1,1]) All solvers have the parameter iter (the number of iterations to compute), some of them have other parameters of the solver, see the particular solver for From scipy-svn at scipy.org Mon May 14 09:56:34 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 08:56:34 -0500 (CDT) Subject: [Scipy-svn] r2995 - trunk/Lib/sandbox Message-ID: <20070514135634.0DC8F39C143@new.scipy.org> Author: dhuard Date: 2007-05-14 08:56:14 -0500 (Mon, 14 May 2007) New Revision: 2995 Added: trunk/Lib/sandbox/dhuard/ Log: svn test: creating a folder in the sandbox From scipy-svn at scipy.org Mon May 14 09:58:04 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 08:58:04 -0500 (CDT) Subject: [Scipy-svn] r2996 - trunk/Lib/interpolate Message-ID: <20070514135804.BC41A39C143@new.scipy.org> Author: stefan Date: 2007-05-14 08:57:50 -0500 (Mon, 14 May 2007) New Revision: 2996 Modified: trunk/Lib/interpolate/interpolate.py Log: Import missing fitpack. 
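With revisions 2993 and 2994 above, the Broyden-type solvers can be reached directly from scipy.optimize. A self-contained version of the docstring example (only a sketch: it assumes a scipy build that includes those revisions, and reuses the norm helper from the nonlin module) looks like this:

    import numpy
    from scipy import optimize
    from scipy.optimize import nonlin

    def F(x):
        "Should converge to x = [0, 0, 0, 0, 0]."
        d = numpy.array([3, 2, 1.5, 1, 0.5])
        c = 0.01
        return -d*numpy.array(x) - c*numpy.array(x)**3

    x = optimize.broyden2(F, [1, 1, 1, 1, 1])
    residual = nonlin.norm(F(x))   # should be small once the iteration has converged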
Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-14 13:56:14 UTC (rev 2995) +++ trunk/Lib/interpolate/interpolate.py 2007-05-14 13:57:50 UTC (rev 2996) @@ -11,7 +11,7 @@ logical_or, atleast_1d, atleast_2d, meshgrid, ravel import numpy as np -#import fitpack +import fitpack def reduce_sometrue(a): all = a From scipy-svn at scipy.org Mon May 14 16:35:47 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 15:35:47 -0500 (CDT) Subject: [Scipy-svn] r2997 - trunk/Lib/sandbox/timeseries Message-ID: <20070514203547.B7F0539C033@new.scipy.org> Author: mattknox_ca Date: 2007-05-14 15:35:36 -0500 (Mon, 14 May 2007) New Revision: 2997 Modified: trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tseries.py Log: eliminated "include_last" paramter from various functions Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-14 13:57:50 UTC (rev 2996) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-14 20:35:36 UTC (rev 2997) @@ -541,7 +541,7 @@ def date_array(dlist=None, start_date=None, end_date=None, length=None, - include_last=True, freq=None): + freq=None): """Constructs a DateArray from: - a starting date and either an ending date or a given length. - a list of dates. @@ -583,9 +583,7 @@ else: if not isDate(end_date): raise DateError, "Ending date should be a valid Date instance!" - length = int(end_date - start_date) - if include_last: - length += 1 + length = int(end_date - start_date) + 1 # dlist = [(start_date+i).value for i in range(length)] dlist = numeric.arange(length, dtype=int_) dlist += start_date.value @@ -599,11 +597,11 @@ return date_array(dlist=dlist, freq=freq) def date_array_fromrange(start_date, end_date=None, length=None, - include_last=True, freq=None): + freq=None): """Constructs a DateArray from a starting date and either an ending date or a length.""" return date_array(start_date=start_date, end_date=end_date, - length=length, include_last=include_last, freq=freq) + length=length, freq=freq) #####--------------------------------------------------------------------------- #---- --- Definition of functions from the corresponding methods --- Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-14 13:57:50 UTC (rev 2996) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-14 20:35:36 UTC (rev 2997) @@ -951,7 +951,7 @@ #---- --- TimeSeries creator --- ##### ------------------------------------------------------------------------- def time_series(data, dates=None, freq=None, observed=None, - start_date=None, end_date=None, length=None, include_last=True, + start_date=None, end_date=None, length=None, mask=nomask, dtype=None, copy=False, fill_value=None, keep_mask=True, small_mask=True, hard_mask=False): @@ -971,8 +971,7 @@ length = dshape[0] if len(dshape) > 0: dates = date_array(start_date=start_date, end_date=end_date, - length=length, include_last=include_last, - freq=freq) + length=length, freq=freq) else: dates = date_array([], freq=freq) elif not isinstance(dates, DateArray): @@ -1435,7 +1434,7 @@ return time_series(data, dflat) # ...and now, fill it ! ...... 
(tstart, tend) = dflat[[0,-1]] - newdates = date_array(start_date=tstart, end_date=tend, include_last=True) + newdates = date_array(start_date=tstart, end_date=tend) nsize = newdates.size #............................. # Get the steps between consecutive data. From scipy-svn at scipy.org Mon May 14 18:11:54 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 17:11:54 -0500 (CDT) Subject: [Scipy-svn] r2998 - trunk/Lib/interpolate Message-ID: <20070514221154.3C68739C156@new.scipy.org> Author: oliphant Date: 2007-05-14 17:11:48 -0500 (Mon, 14 May 2007) New Revision: 2998 Modified: trunk/Lib/interpolate/interpolate.py Log: Fix spline interpolation. Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-14 20:35:36 UTC (rev 2997) +++ trunk/Lib/interpolate/interpolate.py 2007-05-14 22:11:48 UTC (rev 2998) @@ -4,7 +4,8 @@ # !! Need to find argument for keeping initialize. If it isn't # !! found, get rid of it! -__all__ = ['interp1d', 'interp2d', 'cspline', 'cspeval', 'csprep'] +__all__ = ['interp1d', 'interp2d', 'cspline', 'cspeval', 'csprep', 'csp2pp', + 'ppval'] from numpy import shape, sometrue, rank, array, transpose, \ swapaxes, searchsorted, clip, take, ones, putmask, less, greater, \ @@ -296,7 +297,7 @@ return out_of_bounds -def _get_cspline_Bb(yk, xk, kind, conds): +def _get_cspline_Bb(xk, yk, kind, conds): # internal function to compute different tri-diagonal system # depending on the kind of spline requested. # conds is only used for 'second' and 'first' @@ -346,7 +347,7 @@ sl_N += (xN0-xN1)*(xN0-xN3)/((xN2-xN0)*(xN2-xN1)*(xN3-xN2))*yk[-3] sl_N += (xN0-xN1)*(xN0-xN2)/((xN3-xN0)*(xN3-xN1)*(xN3-xN2))*yk[-4] elif kind == 'clamped': - sl_0,sl_N = 0.0, 0.0 + sl_0, sl_N = 0.0, 0.0 elif kind == 'first': sl_0, sl_N = conds @@ -375,8 +376,9 @@ else: B[0,:2] = [2*d1,d1] B[-1,-2:] = [dN,2*dN] - - b = np.empty((Np1,)*yk.shape[1:]) + + # Set up RHS (b) + b = np.empty((Np1,)+yk.shape[1:]) dyk = (yk[1:]-yk[:-1])*1.0 if kind in ['not-a-knot', 'runout', 'parabolic']: b[0] = b[-1] = 0.0 @@ -386,7 +388,7 @@ raise NotImplementedError else: b[0] = (dyk[0]/d1 - sl_0) - b[-1] = (dyk[-1]/dN - sl_N) + b[-1] = -(dyk[-1]/dN - sl_N) b[1:-1,...] = (dyk[1:]/alpha[1:]-dyk[:-1]/alpha[:-1]) b *= 6.0 return B, b, None, None @@ -404,6 +406,7 @@ The first-dimension will be considered the interpolating dimension. """ indxs = np.searchsorted(xk, xnew) + indxs[indxs==0] = 1 indxsm1 = indxs-1 xkm1 = xk[indxsm1] xkvals = xk[indxs] @@ -417,15 +420,46 @@ val += (yk[indxs]/dk - mk0*dk/6.)*dm1 return val -def csprep(yk,xk=None,kind='not-a-knot',conds=None): - """Return a (Spp,xk,yk) representation of a cubic spline given data-points +def csp2pp(mk,xk,yk): + """Return an N-d array providing the piece-wise polynomial form. - yk can be a 2-d array to represent more than one curve, through + mk - second derivative at the knots + xk - knot-points + yk - values of the curve at the knots + + The first 2 dimensions are the polynomial for a particular + curve. The first dimension provides the coefficients for the + polynomial and the second dimension provides the different pieces + """ + dk = xk[1:] - xk[:-1] + temp1 = mk[1:] - mk[:-1] + temp2 = mk[1:]*xk[:-1]-mk[:-1]*xk[1:] + c3 = temp1/(6*dk) + c2 = -temp2/(2*dk) + c1 = (mk[1:]*xk[:-1]**2 - mk[:-1]*xk[1:]**2)/(2*dk) + c1 -= temp1*dk/6. + c1 += (yk[1:]-yk[:-1])/dk + c0 = (mk[:-1]*xk[1:]**3 - mk[1:]*xk[:-1]**3)/(6*dk) + c0 += temp2*dk/6. 
+ c0 += (yk[:-1]*xk[1:] - yk[1:]*xk[:-1])/dk + return np.array([c3,c2,c1,c0]) + +def ppval(pp, xk, xnew): + """Compute a piece-wise polynomial defined by the array of + coefficents pp and the break-points xk on the grid xnew + """ + indxs = numpy.searchsorted(xk, xnew)-1 + indxs[indxs<0]=0 + return array([numpy.polyval(pp[:,k],xnew[i]) for i,k in enumerate(indxs)]) + +def csprep(xk,yk,kind='not-a-knot',conds=None): + """Return a (Spp,xk,yk) representation of a cubic spline given + data-points + + yk can be an N-d array to represent more than one curve, through the same xk points. The first dimension is assumed to be the interpolating dimenion. - If xk is None, then x=arange(len(yk)) will be assumed - kind can be 'natural', 'second', 'clamped', 'endslope', 'periodic', 'symmetric', 'parabolic', 'not-a-knot', 'runout' @@ -435,19 +469,13 @@ """ yk = np.asanyarray(yk) N = yk.shape[0]-1 - if xk is None: - xk = arange(N+1,dtype=float) - factor = 1.0 - else: - deltax = (xk[-1]-xk[0])/float(N) - factor = deltax**2 - B,b,first,last = _get_cspline_Bb(yk, xk, kind, conds) + B,b,first,last = _get_cspline_Bb(xk, yk, kind, conds) mk = np.dual.solve(B,b) if first is not None: mk = np.concatenate((first, mk), axis=0) if last is not None: mk = np.concatenate((mk, last), axis=0) - return mk/factor, xk, yk + return mk, xk, yk -def cspline(yk,xk,xnew,kind='not-a-knot',conds=None): - return cspeval(csprep(yk,xk,kind=kind,conds=conds),xnew) +def cspline(xk,yk,xnew,kind='not-a-knot',conds=None): + return cspeval(csprep(xk,yk,kind=kind,conds=conds),xnew) From scipy-svn at scipy.org Mon May 14 20:18:19 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 19:18:19 -0500 (CDT) Subject: [Scipy-svn] r2999 - in trunk/Lib/sandbox/maskedarray: . tests Message-ID: <20070515001819.D939B39C00F@new.scipy.org> Author: pierregm Date: 2007-05-14 19:17:29 -0500 (Mon, 14 May 2007) New Revision: 2999 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/extras.py trunk/Lib/sandbox/maskedarray/mstats.py trunk/Lib/sandbox/maskedarray/tests/test_mstats.py Log: maskedarray extras.dot : added a 'strict' argument to (de)activate the propagation of masked values mstats : added cov core.argosrt : make sure that the filled array is a pure ndarray Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-14 22:11:48 UTC (rev 2998) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-15 00:17:29 UTC (rev 2999) @@ -66,13 +66,10 @@ import numpy.core.umath as umath import numpy.core.fromnumeric as fromnumeric -from numpy.core.numeric import ndarray -from numpy.core.fromnumeric import amax, amin -import numpy.core.numerictypes as ntypes -from numpy.core.numerictypes import bool_, typecodes -from numpy.core.multiarray import dtype import numpy.core.numeric as numeric -from numpy.lib.shape_base import expand_dims as n_expand_dims +import numpy.core.numerictypes as ntypes +from numpy import bool_, dtype, typecodes, amax, amin, ndarray +from numpy import expand_dims as n_expand_dims import warnings @@ -1713,7 +1710,7 @@ """ if fill_value is None: fill_value = default_fill_value(self) - d = self.filled(fill_value) + d = self.filled(fill_value).view(ndarray) return d.argsort(axis=axis, kind=kind, order=order) #........................ 
def argmin(self, axis=None, fill_value=None): Modified: trunk/Lib/sandbox/maskedarray/extras.py =================================================================== --- trunk/Lib/sandbox/maskedarray/extras.py 2007-05-14 22:11:48 UTC (rev 2998) +++ trunk/Lib/sandbox/maskedarray/extras.py 2007-05-15 00:17:29 UTC (rev 2999) @@ -426,22 +426,22 @@ return mask_rowcols(a, 1) -def dot(a,b): +def dot(a,b, strict=False): """Returns the dot product of two 2D masked arrays a and b. Like the generic numpy equivalent the product sum is over the last dimension of a and the second-to-last dimension of b. - Masked values are propagated: if a masked value appears in a row or column, - the whole row or column is considered masked. + If strict is True, masked values are propagated: if a masked value appears + in a row or column, the whole row or column is considered masked. NB: The first argument is not conjugated. """ #TODO: Works only with 2D arrays. There should be a way to get it to run with higher dimension - if (a.ndim == 2) and (b.ndim == 2): + if strict and (a.ndim == 2) and (b.ndim == 2): a = mask_rows(a) b = mask_cols(b) # - d = numpy.dot(a.filled(0), b.filled(0)) + d = numpy.dot(filled(a, 0), filled(b, 0)) # am = (~getmaskarray(a)) bm = (~getmaskarray(b)) @@ -496,6 +496,9 @@ r_mask = dm return masked_array(r_data, mask=r_mask) + + + #####-------------------------------------------------------------------------- #---- --- Concatenation helpers --- #####-------------------------------------------------------------------------- Modified: trunk/Lib/sandbox/maskedarray/mstats.py =================================================================== --- trunk/Lib/sandbox/maskedarray/mstats.py 2007-05-14 22:11:48 UTC (rev 2998) +++ trunk/Lib/sandbox/maskedarray/mstats.py 2007-05-15 00:17:29 UTC (rev 2999) @@ -15,12 +15,13 @@ import numpy from numpy import bool_, float_, int_ from numpy import array as narray -from numpy.core import numeric as numeric +import numpy.core.numeric as numeric +from numpy.core.numeric import concatenate import maskedarray as MA from maskedarray.core import masked, nomask, MaskedArray from maskedarray.core import masked_array as marray -from maskedarray.extras import apply_along_axis +from maskedarray.extras import apply_along_axis, dot def _quantiles_1D(data,m,p): @@ -149,8 +150,53 @@ # return med return apply_along_axis(_median1d, 0, data) +def cov(x, y=None, rowvar=True, bias=False, strict=False): + """ + Estimate the covariance matrix. + If x is a vector, return the variance. For matrices, returns the covariance + matrix. + If y is given, it is treated as an additional (set of) variable(s). + + Normalization is by (N-1) where N is the number of observations (unbiased + estimate). If bias is True then normalization is by N. + + If rowvar is non-zero (default), then each row is a variable with observations + in the columns, otherwise each column is a variable and the observations are + in the rows. + + If strict is True, masked values are propagated: if a masked value appears in + a row or column, the whole row or column is considered masked. 
+ """ + X = narray(x, ndmin=2, subok=True, dtype=float) + if X.shape[0] == 1: + rowvar = True + if rowvar: + axis = 0 + tup = (slice(None),None) + else: + axis = 1 + tup = (None, slice(None)) + # + if y is not None: + y = narray(y, copy=False, ndmin=2, subok=True, dtype=float) + X = concatenate((X,y),axis) + # + X -= X.mean(axis=1-axis)[tup] + n = X.count(1-axis) + # + if bias: + fact = n*1.0 + else: + fact = n-1.0 + # + if not rowvar: + return (dot(X.T, X.conj(), strict=False) / fact).squeeze() + else: + return (dot(X, X.T.conj(), strict=False) / fact).squeeze() + + ################################################################################ if __name__ == '__main__': from maskedarray.testutils import assert_almost_equal, assert_equal Modified: trunk/Lib/sandbox/maskedarray/tests/test_mstats.py =================================================================== --- trunk/Lib/sandbox/maskedarray/tests/test_mstats.py 2007-05-14 22:11:48 UTC (rev 2998) +++ trunk/Lib/sandbox/maskedarray/tests/test_mstats.py 2007-05-15 00:17:29 UTC (rev 2999) @@ -18,7 +18,7 @@ import maskedarray.testutils from maskedarray.testutils import * -from maskedarray.mstats import mquantiles, mmedian +from maskedarray.mstats import mquantiles, mmedian, cov #.............................................................................. class test_quantiles(NumpyTestCase): @@ -110,6 +110,20 @@ x[x%5==0] = masked assert_equal(mmedian(x), [[12,10],[8,9],[16,17]]) +class test_misc(NumpyTestCase): + def __init__(self, *args, **kwds): + NumpyTestCase.__init__(self, *args, **kwds) + + def check_cov(self): + "Tests the cov function." + x = masked_array([[1,2,3],[4,5,6]], mask=[[1,0,0],[0,0,0]]) + c = cov(x[0]) + assert_equal(c, (x[0].anom()**2).sum()) + c = cov(x[1]) + assert_equal(c, (x[1].anom()**2).sum()/2.) + c = cov(x) + assert_equal(c[1,0], (x[0].anom()*x[1].anom()).sum()) + ############################################################################### #------------------------------------------------------------------------------ if __name__ == "__main__": From scipy-svn at scipy.org Mon May 14 22:56:35 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 14 May 2007 21:56:35 -0500 (CDT) Subject: [Scipy-svn] r3000 - trunk/Lib/interpolate Message-ID: <20070515025635.0BAA1C7C005@new.scipy.org> Author: oliphant Date: 2007-05-14 21:56:16 -0500 (Mon, 14 May 2007) New Revision: 3000 Modified: trunk/Lib/interpolate/interpolate.py Log: Change spline names to allow for adding quadratic, quartic, and quintic splines. Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-15 00:17:29 UTC (rev 2999) +++ trunk/Lib/interpolate/interpolate.py 2007-05-15 02:56:16 UTC (rev 3000) @@ -1,12 +1,9 @@ """ Classes for interpolating values. """ -# !! Need to find argument for keeping initialize. If it isn't -# !! found, get rid of it! +__all__ = ['interp1d', 'interp2d', 'spline3', 'sp3eval', 'sp3rep', 'sp3topp', + 'ppform'] -__all__ = ['interp1d', 'interp2d', 'cspline', 'cspeval', 'csprep', 'csp2pp', - 'ppval'] - from numpy import shape, sometrue, rank, array, transpose, \ swapaxes, searchsorted, clip, take, ones, putmask, less, greater, \ logical_or, atleast_1d, atleast_2d, meshgrid, ravel @@ -20,6 +17,9 @@ all = sometrue(all,axis=0) return all +# !! Need to find argument for keeping initialize. If it isn't +# !! found, get rid of it! + class interp2d(object): """ Interpolate over a 2D grid. 
@@ -151,8 +151,8 @@ An N-D array of real values. y's length along the interpolation axis must be equal to the length of x. kind : str - Specifies the kind of interpolation. At the moment, only 'linear' is - implemented. + Specifies the kind of interpolation. At the moment, + only 'linear' and 'cubic' are implemented. axis : int Specifies the axis of y along which to interpolate. Interpolation defaults to the last axis of y. @@ -176,54 +176,48 @@ self.bounds_error = bounds_error self.fill_value = fill_value - if kind != 'linear': - raise NotImplementedError("Only linear supported for now. Use " - "fitpack routines for other types.") - + if kind not in ['linear', 'cubic']: + raise NotImplementedError("Only linear and cubic supported for " \ + "now. Use fitpack routines "\ + "for other types.") x = array(x, copy=self.copy) y = array(y, copy=self.copy) - if len(x.shape) != 1: + if x.ndim != 1: raise ValueError("the x array must have exactly one dimension.") - if len(y.shape) == 0: + if y.ndim == 0: raise ValueError("the y array must have at least one dimension.") # Normalize the axis to ensure that it is positive. self.axis = axis % len(y.shape) - - # Make a "view" of the y array that is rotated to the interpolation - # axis. - oriented_y = y.swapaxes(self._interp_axis, axis) + self._kind = kind + + if kind == 'linear': + # Make a "view" of the y array that is rotated to the interpolation + # axis. + oriented_y = y.swapaxes(self._interp_axis, axis) + minval = 2 + len_y = oriented_y.shape[self._interp_axis] + self._call = self._call_linear + else: + oriented_y = y.swapaxes(0, axis) + minval = 4 + len_y = oriented_y.shape[0] + self._call = self._call_cubic + self._spline = sp3rep(x,oriented_y) + len_x = len(x) - len_y = oriented_y.shape[self._interp_axis] if len_x != len_y: - raise ValueError("x and y arrays must be equal in length along" + raise ValueError("x and y arrays must be equal in length along" "interpolation axis.") - if len_x < 2 or len_y < 2: - raise ValueError("x and y arrays must have more than 1 entry") + if len_x < minval: + raise ValueError("x and y arrays must have at " \ + "least %d entries" % minval) self.x = x self.y = oriented_y - def __call__(self, x_new): - """ Find linearly interpolated y_new = f(x_new). + def _call_linear(self, x_new): - Parameters - ---------- - x_new : number or array - New independent variable(s). - - Returns - ------- - y_new : number or array - Linearly interpolated value(s) corresponding to x_new. - """ - - # 1. Handle values in x_new that are outside of x. Throw error, - # or return a list of mask array indicating the outofbounds values. - # The behavior is set by the bounds_error variable. - x_new = atleast_1d(x_new) - out_of_bounds = self._check_bounds(x_new) - # 2. Find where in the orignal data, the values to interpolate # would be inserted. # Note: If x_new[n] == x[m], then m is returned by searchsorted. @@ -250,19 +244,49 @@ # 5. Calculate the actual value for each entry in x_new. y_new = slope*(x_new-x_lo) + y_lo - # 6. Fill any values that were out of bounds with fill_value. - y_new[..., out_of_bounds] = self.fill_value + return y_new + def _call_cubic(self, x_new): + return sp3eval(self._spline,x_new) + + def __call__(self, x_new): + """ Find linearly interpolated y_new = f(x_new). + + Parameters + ---------- + x_new : number or array + New independent variable(s). + + Returns + ------- + y_new : number or array + Linearly interpolated value(s) corresponding to x_new. + """ + + # 1. Handle values in x_new that are outside of x. 
Throw error, + # or return a list of mask array indicating the outofbounds values. + # The behavior is set by the bounds_error variable. + x_new = atleast_1d(x_new) + out_of_bounds = self._check_bounds(x_new) + + y_new = self._call(x_new) + + if self._kind == 'linear': + # 6. Fill any values that were out of bounds with fill_value. + y_new[..., out_of_bounds] = self.fill_value + else: + y_new[out_of_bounds] = self.fill_value + # Rotate the values of y_new back so that they correspond to the # correct x_new values. For N-D x_new, take the last N axes from y_new # and insert them where self.axis was in the list of axes. - nx = len(x_new.shape) - ny = len(y_new.shape) + nx = x_new.ndim + ny = y_new.ndim axes = range(ny - nx) axes[self.axis:self.axis] = range(ny - nx, ny) result = y_new.transpose(axes) - return result + def _check_bounds(self, x_new): """ Check the inputs for being in the bounds of the interpolated data. @@ -296,8 +320,24 @@ out_of_bounds = logical_or(below_bounds, above_bounds) return out_of_bounds +class ppform(object): + def __init__(self, coeffs, breaks, dosort=False): + self.coeffs = np.asarray(coeffs) + if dosort: + self.breaks = np.sort(breaks) + else: + self.breaks = np.asarray(breaks) + self.N = self.coeffs.shape[0] + def __call__(self, xnew): + indxs = np.searchsorted(self.breaks, xnew)-1 + indxs[indxs<0]=0 + pp = self.coeffs + V = np.vander(xnew,N=self.N) + # res = np.diag(np.dot(V,pp[:,indxs])) + res = array([np.dot(V[k,:],pp[:,indxs[k]]) for k in xrange(len(xnew))]) + return res -def _get_cspline_Bb(xk, yk, kind, conds): +def _get_spline3_Bb(xk, yk, kind, conds): # internal function to compute different tri-diagonal system # depending on the kind of spline requested. # conds is only used for 'second' and 'first' @@ -365,7 +405,7 @@ B[-1,-3:] = [dN,-dN1-dN,dN1] elif kind == 'runout': B[0,:3] = [1,-2,1] - b[-1,-3:] = [1,-2,1] + B[-1,-3:] = [1,-2,1] elif kind == 'parabolic': B[0,:2] = [1,-1] B[-1,-2:] = [-1,1] @@ -396,7 +436,7 @@ raise ValueError, "%s not supported" % kind -def cspeval((mk,xk,yk),xnew): +def sp3eval((mk,xk,yk),xnew): """Evaluate a cubic-spline representation of the points (xk,yk) at the new values xnew. The mk values are the second derivatives at xk The xk vector must be sorted. @@ -420,7 +460,7 @@ val += (yk[indxs]/dk - mk0*dk/6.)*dm1 return val -def csp2pp(mk,xk,yk): +def sp3topp(mk,xk,yk): """Return an N-d array providing the piece-wise polynomial form. mk - second derivative at the knots @@ -442,17 +482,9 @@ c0 = (mk[:-1]*xk[1:]**3 - mk[1:]*xk[:-1]**3)/(6*dk) c0 += temp2*dk/6. 
c0 += (yk[:-1]*xk[1:] - yk[1:]*xk[:-1])/dk - return np.array([c3,c2,c1,c0]) + return ppform([c3,c2,c1,c0], xk) -def ppval(pp, xk, xnew): - """Compute a piece-wise polynomial defined by the array of - coefficents pp and the break-points xk on the grid xnew - """ - indxs = numpy.searchsorted(xk, xnew)-1 - indxs[indxs<0]=0 - return array([numpy.polyval(pp[:,k],xnew[i]) for i,k in enumerate(indxs)]) - -def csprep(xk,yk,kind='not-a-knot',conds=None): +def sp3rep(xk,yk,kind='not-a-knot',conds=None): """Return a (Spp,xk,yk) representation of a cubic spline given data-points @@ -469,7 +501,7 @@ """ yk = np.asanyarray(yk) N = yk.shape[0]-1 - B,b,first,last = _get_cspline_Bb(xk, yk, kind, conds) + B,b,first,last = _get_spline3_Bb(xk, yk, kind, conds) mk = np.dual.solve(B,b) if first is not None: mk = np.concatenate((first, mk), axis=0) @@ -477,5 +509,13 @@ mk = np.concatenate((mk, last), axis=0) return mk, xk, yk -def cspline(xk,yk,xnew,kind='not-a-knot',conds=None): - return cspeval(csprep(xk,yk,kind=kind,conds=conds),xnew) +def spline3(xk,yk,xnew,kind='not-a-knot',conds=None): + return sp3eval(sp3rep(xk,yk,kind=kind,conds=conds),xnew) + +def sp2rep(xk,yk): + pass + +def sp2eval(xk,yk): + pass + +def From scipy-svn at scipy.org Tue May 15 05:31:58 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 04:31:58 -0500 (CDT) Subject: [Scipy-svn] r3001 - trunk/Lib/interpolate Message-ID: <20070515093158.8D21639C030@new.scipy.org> Author: jarrod.millman Date: 2007-05-15 04:31:55 -0500 (Tue, 15 May 2007) New Revision: 3001 Modified: trunk/Lib/interpolate/interpolate.py Log: removed extra def Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-15 02:56:16 UTC (rev 3000) +++ trunk/Lib/interpolate/interpolate.py 2007-05-15 09:31:55 UTC (rev 3001) @@ -517,5 +517,3 @@ def sp2eval(xk,yk): pass - -def From scipy-svn at scipy.org Tue May 15 05:41:32 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 04:41:32 -0500 (CDT) Subject: [Scipy-svn] r3002 - in trunk/Lib: cluster fftpack interpolate odr stats weave Message-ID: <20070515094132.A33E239C030@new.scipy.org> Author: jarrod.millman Date: 2007-05-15 04:41:19 -0500 (Tue, 15 May 2007) New Revision: 3002 Modified: trunk/Lib/cluster/setup.py trunk/Lib/fftpack/setup.py trunk/Lib/interpolate/setup.py trunk/Lib/odr/setup.py trunk/Lib/stats/setup.py trunk/Lib/weave/setup.py Log: cosmetic cleanup to make setup.py imports more consistent Modified: trunk/Lib/cluster/setup.py =================================================================== --- trunk/Lib/cluster/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/cluster/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,7 +1,6 @@ #!/usr/bin/env python -import os -join = os.path.join +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration Modified: trunk/Lib/fftpack/setup.py =================================================================== --- trunk/Lib/fftpack/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/fftpack/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,8 +1,7 @@ #!/usr/bin/env python # Created by Pearu Peterson, August 2002 -import os -join = os.path.join +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration Modified: trunk/Lib/interpolate/setup.py 
=================================================================== --- trunk/Lib/interpolate/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/interpolate/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,6 +1,6 @@ #!/usr/bin/env python -import os +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration @@ -8,7 +8,7 @@ config = Configuration('interpolate', parent_package, top_path) config.add_library('fitpack', - sources=[os.path.join('fitpack', '*.f')], + sources=[join('fitpack', '*.f')], ) config.add_extension('_fitpack', Modified: trunk/Lib/odr/setup.py =================================================================== --- trunk/Lib/odr/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/odr/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,6 +1,6 @@ #!/usr/bin/env python -import os,sys,re +from os.path import join from distutils import dep_util from glob import glob import warnings @@ -26,7 +26,7 @@ warnings.warn(BlasNotFoundError.__doc__) libodr_files.append('d_lpkbls.f') - libodr = [os.path.join('odrpack', x) for x in libodr_files] + libodr = [join('odrpack', x) for x in libodr_files] config.add_library('odrpack', sources=libodr) sources = ['__odrpack.c'] libraries = ['odrpack'] + blas_info.pop('libraries', []) Modified: trunk/Lib/stats/setup.py =================================================================== --- trunk/Lib/stats/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/stats/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,6 +1,6 @@ #!/usr/bin/env python -import os +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration @@ -9,7 +9,7 @@ config.add_data_dir('tests') config.add_library('statlib', - sources=[os.path.join('statlib', '*.f')]) + sources=[join('statlib', '*.f')]) # add statlib module config.add_extension('statlib', Modified: trunk/Lib/weave/setup.py =================================================================== --- trunk/Lib/weave/setup.py 2007-05-15 09:31:55 UTC (rev 3001) +++ trunk/Lib/weave/setup.py 2007-05-15 09:41:19 UTC (rev 3002) @@ -1,11 +1,13 @@ #!/usr/bin/env python -import os + +from os.path import join + def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('weave',parent_package,top_path) config.add_data_dir('tests') config.add_data_dir('scxx') - config.add_data_dir(os.path.join('blitz','blitz')) + config.add_data_dir(join('blitz','blitz')) config.add_data_dir('doc') config.add_data_dir('examples') return config From scipy-svn at scipy.org Tue May 15 06:35:24 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 05:35:24 -0500 (CDT) Subject: [Scipy-svn] r3003 - trunk/Lib/sandbox Message-ID: <20070515103524.25CE839C08A@new.scipy.org> Author: jarrod.millman Date: 2007-05-15 05:35:21 -0500 (Tue, 15 May 2007) New Revision: 3003 Modified: trunk/Lib/sandbox/setup.py Log: cosmetic fix for sandbox/setup.py Modified: trunk/Lib/sandbox/setup.py =================================================================== --- trunk/Lib/sandbox/setup.py 2007-05-15 09:41:19 UTC (rev 3002) +++ trunk/Lib/sandbox/setup.py 2007-05-15 10:35:21 UTC (rev 3003) @@ -1,13 +1,15 @@ -import os +#!/usr/bin/env python +from os.path import join + def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('sandbox',parent_package,top_path) 
sandbox_packages = [] try: - sandbox_file = open(os.path.join(config.package_path, - 'enabled_packages.txt'), 'rU') + sandbox_file = open(join(config.package_path, + 'enabled_packages.txt'), 'rU') except IOError: pass else: From scipy-svn at scipy.org Tue May 15 07:18:32 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 06:18:32 -0500 (CDT) Subject: [Scipy-svn] r3004 - trunk/Lib/interpolate Message-ID: <20070515111832.4633039C07E@new.scipy.org> Author: oliphant Date: 2007-05-15 06:18:19 -0500 (Tue, 15 May 2007) New Revision: 3004 Modified: trunk/Lib/interpolate/interpolate.py Log: Add quadratic interpolation as well as place holders for quartic and quintic interpolators. Add zero-order hold, linear, quadratic, and cubic spline to interp1d. Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-15 10:35:21 UTC (rev 3003) +++ trunk/Lib/interpolate/interpolate.py 2007-05-15 11:18:19 UTC (rev 3004) @@ -1,13 +1,14 @@ """ Classes for interpolating values. """ -__all__ = ['interp1d', 'interp2d', 'spline3', 'sp3eval', 'sp3rep', 'sp3topp', +__all__ = ['interp1d', 'interp2d', 'spline', 'spleval', 'splmake', 'spltopp', 'ppform'] from numpy import shape, sometrue, rank, array, transpose, \ swapaxes, searchsorted, clip, take, ones, putmask, less, greater, \ logical_or, atleast_1d, atleast_2d, meshgrid, ravel import numpy as np +import scipy.linalg as slin import fitpack @@ -152,7 +153,7 @@ axis must be equal to the length of x. kind : str Specifies the kind of interpolation. At the moment, - only 'linear' and 'cubic' are implemented. + only 'linear' and 'cubic' are implemented for now. axis : int Specifies the axis of y along which to interpolate. Interpolation defaults to the last axis of y. @@ -176,10 +177,16 @@ self.bounds_error = bounds_error self.fill_value = fill_value - if kind not in ['linear', 'cubic']: - raise NotImplementedError("Only linear and cubic supported for " \ - "now. Use fitpack routines "\ - "for other types.") + + if isinstance(kind, integer): + kind = {0:'zero', + 1:'slinear', + 2:'quadratic', + 3:'cubic'}.get(kind,'bad') + + if kind not in ['zero', 'linear', 'slinear', 'quadratic', 'cubic']: + raise NotImplementedError("%d unsupported: Use fitpack "\ + "routines for other types.") x = array(x, copy=self.copy) y = array(y, copy=self.copy) @@ -201,10 +208,11 @@ self._call = self._call_linear else: oriented_y = y.swapaxes(0, axis) - minval = 4 + order = {'zero':0,'slinear':1,'quadratic':2, 'cubic':3}[kind] + minval = order + 1 len_y = oriented_y.shape[0] - self._call = self._call_cubic - self._spline = sp3rep(x,oriented_y) + self._call = self._call_spline + self._spline = splmake(x,oriented_y,order=order) len_x = len(x) if len_x != len_y: @@ -246,8 +254,10 @@ return y_new - def _call_cubic(self, x_new): - return sp3eval(self._spline,x_new) + def _call_spline(self, x_new): + x_new = asarray(x_new) + result = spleval(self._spline,x_new.ravel()) + return result.reshape(x_new.shape+result.shape[1:]) def __call__(self, x_new): """ Find linearly interpolated y_new = f(x_new). @@ -271,23 +281,28 @@ y_new = self._call(x_new) + # Rotate the values of y_new back so that they correspond to the + # correct x_new values. For N-D x_new, take the last (for linear) + # or first (for other splines) N axes + # from y_new and insert them where self.axis was in the list of axes. + nx = x_new.ndim + ny = y_new.ndim + + # 6. 
Fill any values that were out of bounds with fill_value. + # and + # 7. Rotate the values back to their proper place. + if self._kind == 'linear': - # 6. Fill any values that were out of bounds with fill_value. y_new[..., out_of_bounds] = self.fill_value + axes = range(ny - nx) + axes[self.axis:self.axis] = range(ny - nx, ny) + return y_new.transpose(axes) else: y_new[out_of_bounds] = self.fill_value + axes = range(ny - nx, ny) + axes[self.axis:self.axis] = range(ny - nx) + return y_new.transpose(axes) - # Rotate the values of y_new back so that they correspond to the - # correct x_new values. For N-D x_new, take the last N axes from y_new - # and insert them where self.axis was in the list of axes. - nx = x_new.ndim - ny = y_new.ndim - axes = range(ny - nx) - axes[self.axis:self.axis] = range(ny - nx, ny) - result = y_new.transpose(axes) - return result - - def _check_bounds(self, x_new): """ Check the inputs for being in the bounds of the interpolated data. @@ -330,13 +345,32 @@ self.N = self.coeffs.shape[0] def __call__(self, xnew): indxs = np.searchsorted(self.breaks, xnew)-1 - indxs[indxs<0]=0 + indxs = indxs.clip(0,len(self.breaks)) pp = self.coeffs V = np.vander(xnew,N=self.N) # res = np.diag(np.dot(V,pp[:,indxs])) res = array([np.dot(V[k,:],pp[:,indxs[k]]) for k in xrange(len(xnew))]) return res +def _get_spline2_Bb(xk, yk, kind, conds): + Np1 = len(xk) + dk = xk[1:]-xk[:-1] + if kind == 'not-a-knot': + nlu = (1,1) + B = np.ones((3,Np1)) + alpha = 2*(yk[1:]-yk[:-1])/dk + zrs = np.zeros((1,)+yk.shape[1:]) + row = (Np1-1)//2 + b = np.concatenate((alpha[:row],zrs,alpha[row:]),axis=0) + B[0,row+2:] = 0 + B[2,:(row-1)] = 0 + B[0,row+1] = dk[row-1] + B[1,row] = -dk[row]-dk[row-1] + B[2,row-1] = dk[row] + return B, b, None, nlu + else: + raise NotImplementedError("quadratic %s is not available" % kind) + def _get_spline3_Bb(xk, yk, kind, conds): # internal function to compute different tri-diagonal system # depending on the kind of spline requested. 
@@ -358,16 +392,20 @@ b[0] -= m0 b[-1] -= mN - # put m0 and mN into the correct shape for - # concatenation - m0 = array(m0,copy=0,ndmin=yk.ndim) - mN = array(mN,copy=0,ndmin=yk.ndim) - if m0.shape[1:] != yk.shape[1:]: - m0 = m0*(ones(yk.shape[1:])[newaxis,...]) - if mN.shape[1:] != yk.shape[1:]: - mN = mN*(ones(yk.shape[1:])[newaxis,...]) + def append_func(mk): + # put m0 and mN into the correct shape for + # concatenation + m0 = array(m0,copy=0,ndmin=yk.ndim) + mN = array(mN,copy=0,ndmin=yk.ndim) + if m0.shape[1:] != yk.shape[1:]: + m0 = m0*(ones(yk.shape[1:])[newaxis,...]) + if mN.shape[1:] != yk.shape[1:]: + mN = mN*(ones(yk.shape[1:])[newaxis,...]) + mk = concatenate((m0,mk),axis=0) + mk = concatenate((mk,mN),axis=0) + return mk - return B, b, m0, mN + return B, b, append_func, None elif kind in ['clamped', 'endslope', 'first', 'not-a-knot', 'runout', @@ -434,9 +472,33 @@ return B, b, None, None else: raise ValueError, "%s not supported" % kind - -def sp3eval((mk,xk,yk),xnew): +def _sp0eval((mk,xk,yk),xnew): + indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) + return yk[indxs-1] + +def _sp0topp(mk,xk,yk): + c0 = yk + return ppform(array([c0]),xk) + +def _sp1eval((mk,xk,yk),xnew): + indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) + indxsm1 = indxs-1 + d = xnew - xk[indxs] + dk = (x[1:]-x[:-1])[indxsm1] + wk = yk[indxs] + wkm1 = yk[indxsm1] + res = (wk-wkm1)/dk + res *= d + res += wk + return res + +def _sp1topp(mk,xk,yk): + c1 = (yk[1:]-yk[:-1])/(xk[1:]-xk[:-1]) + c0 = yk[1:] - c1*xk[1:] + return ppform(array([c1,c0]), xk) + +def _sp3eval((mk,xk,yk),xnew): """Evaluate a cubic-spline representation of the points (xk,yk) at the new values xnew. The mk values are the second derivatives at xk The xk vector must be sorted. @@ -446,7 +508,7 @@ The first-dimension will be considered the interpolating dimension. """ indxs = np.searchsorted(xk, xnew) - indxs[indxs==0] = 1 + indxs = indxs.clip(1,len(xk)) indxsm1 = indxs-1 xkm1 = xk[indxsm1] xkvals = xk[indxs] @@ -460,7 +522,7 @@ val += (yk[indxs]/dk - mk0*dk/6.)*dm1 return val -def sp3topp(mk,xk,yk): +def _sp3topp(mk,xk,yk): """Return an N-d array providing the piece-wise polynomial form. mk - second derivative at the knots @@ -484,36 +546,136 @@ c0 += (yk[:-1]*xk[1:] - yk[1:]*xk[:-1])/dk return ppform([c3,c2,c1,c0], xk) -def sp3rep(xk,yk,kind='not-a-knot',conds=None): - """Return a (Spp,xk,yk) representation of a cubic spline given +def splmake(xk,yk,order=3,kind='not-a-knot',conds=None): + """Return an (mk,xk,yk) representation of a spline given data-points yk can be an N-d array to represent more than one curve, through the same xk points. The first dimension is assumed to be the interpolating dimenion. - kind can be 'natural', 'second', 'clamped', 'endslope', 'periodic', - 'symmetric', 'parabolic', 'not-a-knot', 'runout' + kind can be 'natural', 'second', 'first', 'clamped', 'endslope', + 'periodic', 'symmetric', 'parabolic', 'not-a-knot', + 'runout' - for 'second', and 'clamped' conditions can be given which should + for 'second', and 'first' conditions can be given which should be the desired second and first derivatives at the end-points, respectively. 
""" yk = np.asanyarray(yk) N = yk.shape[0]-1 - B,b,first,last = _get_spline3_Bb(xk, yk, kind, conds) - mk = np.dual.solve(B,b) - if first is not None: - mk = np.concatenate((first, mk), axis=0) - if last is not None: - mk = np.concatenate((mk, last), axis=0) - return mk, xk, yk -def spline3(xk,yk,xnew,kind='not-a-knot',conds=None): - return sp3eval(sp3rep(xk,yk,kind=kind,conds=conds),xnew) + order = int(order) + if order in [0,1]: + return order, xk, yk, order + if order < 2: + raise ValueError("order cannot be negative") -def sp2rep(xk,yk): - pass + try: + func = eval('_get_spline%d_Bb'%order) + except NameError: + raise ValueError("order %d not available" % order) -def sp2eval(xk,yk): - pass + B,b,exfunc,nlu = func(xk, yk, kind, conds) + + if nlu is None: + mk = np.dual.solve(B,b) + else: + mk = slin.solve_banded(nlu,B,b) + + if exfunc is not None: + # need to add additional values to mk + # using the returned function + mk = exfunc(mk) + + return mk, xk, yk, order + +def spleval((mk,xk,yk,order),xnew): + func = eval('_sp%deval'%order) + return func((mk,xk,yk),xnew) + +def spltopp(mk,xk,yk,order=3): + return eval('_sp%dtopp'%order)(mk,xk,yk) + +def spline(xk,yk,xnew,order=3,kwds='not-a-knot',conds=None): + func = eval('_sp%deval'%order) + return func(splmake(xk,yk,order=order,kind=kind,conds=conds),xnew) + +def _sp2topp(zk,xk,yk): + dk = xk[1:]-xk[:-1] + c2 = (zk[1:]-zk[:-1])/(2*dk) + c1 = (xk[1:]*zk[:-1]-xk[:-1]*zk[1:])/dk + c0 = (zk[1:]*xk[:-1]**2 - zk[:-1]*xk[1:]**2)/(2*dk) + c0 += yk[1:]- zk[1:]*dk/2. + return ppform([c2,c1,c0],xk) + +def _sp2eval((zk,xk,yk),xnew): + indxs = np.searchsorted(xk, xnew) + indxs = indxs.clip(1,len(xk)) + indxsm1 = indxs-1 + dk = (xk[1:]-xk[:-1])[indxsm1] + d = xnew - xk[indxs] + zk0 = zk[indxs] + res = (zk0-zk[indxsm1])/(2*dk) + res *= d + res += zk0 + res *= d + res += wk[indxs] + return res + +def _sp4topp(mk,xk,yk): + raise NotImplementedError + +def _sp4eval((mk,xk,yk),xnew): + nk = mk[1::2] # second-derivatives + mk = mk[::2] # third-derivatives + indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) + indxsm1 = indxs-1 + dk = (xk[1:]-xk[:-1])[indxsm1] + d = xnew - xk[indxs] + nk0 = nk[indxs] + nkm1 = nk[indxsm1] + mk0 = mk[indxs] + wk = yk[indxs] + wkm1 = yk[indxsm1] + res = (nk0-nkm1)/(24*dk)*d + res += nk0/6. + res *= d + res += mk0/2. + res *= d + res += mk0*dk/2. + (wk-wkm1)/dk - (3*nk0+nkm1)*(dk**2)/24. + res *= d + res += wk + return res + +def _sp5topp(mk,xk,yk): + raise NotImplementedError + +def _sp5eval((mk,xk,yk),xnew): + mk = mk[::3] + nk = mk[1::3] + ok = mk[2::3] + indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) + indxsm1 = indxs-1 + dk = (xk[1:]-xk[:-1])[indxsm1] + d = xnew - xk[indxs] + ok0 = ok[indxs] + okm1 = ok[indxsm1] + nk0 = nk[indxs] + mk0 = mk[indxs] + wk = yk[indxs] + wkm1 = yk[indxsm1] + res = (ok0-ok1)/(120*dk) + res *= d + res += ok0/24. + res *= d + res += nk0/6. + res *= d + res += mk0/2. + res *= d + res += (4*ok0+okm1)*(dk**3)/120. - nk0*(dk**2)/6. + res += mk0*dk/2. + (wk-wkm1)/dk + res *= d + res += wk + return res + From scipy-svn at scipy.org Tue May 15 07:31:53 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 06:31:53 -0500 (CDT) Subject: [Scipy-svn] r3005 - trunk/Lib/interpolate Message-ID: <20070515113153.4CB6839C07E@new.scipy.org> Author: oliphant Date: 2007-05-15 06:31:51 -0500 (Tue, 15 May 2007) New Revision: 3005 Modified: trunk/Lib/interpolate/interpolate.py Log: Start to use banded solver in cubic spline implementation. 
Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-15 11:18:19 UTC (rev 3004) +++ trunk/Lib/interpolate/interpolate.py 2007-05-15 11:31:51 UTC (rev 3005) @@ -356,8 +356,9 @@ Np1 = len(xk) dk = xk[1:]-xk[:-1] if kind == 'not-a-knot': + # use banded-solver nlu = (1,1) - B = np.ones((3,Np1)) + B = ones((3,Np1)) alpha = 2*(yk[1:]-yk[:-1])/dk zrs = np.zeros((1,)+yk.shape[1:]) row = (Np1-1)//2 @@ -383,9 +384,14 @@ m0, mN = conds # the matrix to invert is (N-1,N-1) + # use banded solver beta = 2*(xk[2:]-xk[:-2]) alpha = xk[1:]-xk[:-1] - B = np.diag(alpha[1:-1],k=-1) + np.diag(beta) + np.diag(alpha[2:],k=1) + nlu = (1,1) + B = np.empty((3,Np1-2)) + B[0,1:] = alpha[2:] + B[1,:] = beta + B[2,:-1] = alpha[1:-1] dyk = yk[1:]-yk[:-1] b = (dyk[1:]/alpha[1:] - dyk[:-1]/alpha[:-1]) b *= 6 @@ -395,17 +401,17 @@ def append_func(mk): # put m0 and mN into the correct shape for # concatenation - m0 = array(m0,copy=0,ndmin=yk.ndim) - mN = array(mN,copy=0,ndmin=yk.ndim) - if m0.shape[1:] != yk.shape[1:]: - m0 = m0*(ones(yk.shape[1:])[newaxis,...]) - if mN.shape[1:] != yk.shape[1:]: - mN = mN*(ones(yk.shape[1:])[newaxis,...]) - mk = concatenate((m0,mk),axis=0) - mk = concatenate((mk,mN),axis=0) + ma = array(m0,copy=0,ndmin=yk.ndim) + mb = array(mN,copy=0,ndmin=yk.ndim) + if ma.shape[1:] != yk.shape[1:]: + ma = ma*(ones(yk.shape[1:])[np.newaxis,...]) + if mb.shape[1:] != yk.shape[1:]: + mb = mb*(ones(yk.shape[1:])[np.newaxis,...]) + mk = np.concatenate((ma,mk),axis=0) + mk = np.concatenate((mk,mb),axis=0) return mk - return B, b, append_func, None + return B, b, append_func, nlu elif kind in ['clamped', 'endslope', 'first', 'not-a-knot', 'runout', From scipy-svn at scipy.org Tue May 15 07:57:46 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 06:57:46 -0500 (CDT) Subject: [Scipy-svn] r3006 - in trunk/Lib: odr sandbox/arpack sandbox/exmplpackage sandbox/pysparse sandbox/spline sandbox/xplt Message-ID: <20070515115746.4D08739C079@new.scipy.org> Author: jarrod.millman Date: 2007-05-15 06:57:40 -0500 (Tue, 15 May 2007) New Revision: 3006 Modified: trunk/Lib/odr/setup.py trunk/Lib/sandbox/arpack/setup.py trunk/Lib/sandbox/exmplpackage/setup.py trunk/Lib/sandbox/pysparse/setup.py trunk/Lib/sandbox/spline/setup.py trunk/Lib/sandbox/xplt/setup.py Log: more cosmetic cleanup of os.path imports for consistency Modified: trunk/Lib/odr/setup.py =================================================================== --- trunk/Lib/odr/setup.py 2007-05-15 11:31:51 UTC (rev 3005) +++ trunk/Lib/odr/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -1,18 +1,11 @@ #!/usr/bin/env python from os.path import join -from distutils import dep_util -from glob import glob -import warnings -from numpy.distutils.core import Extension -from numpy.distutils.misc_util import get_path, Configuration, dot_join - -from numpy.distutils.system_info import get_info,dict_append,\ - AtlasNotFoundError,LapackNotFoundError,BlasNotFoundError,\ - LapackSrcNotFoundError,BlasSrcNotFoundError - def configuration(parent_package='', top_path=None): + import warnings + from numpy.distutils.misc_util import Configuration + from numpy.distutils.system_info import get_info, BlasNotFoundError config = Configuration('odr', parent_package, top_path) libodr_files = ['d_odr.f', Modified: trunk/Lib/sandbox/arpack/setup.py =================================================================== --- trunk/Lib/sandbox/arpack/setup.py 2007-05-15 
11:31:51 UTC (rev 3005) +++ trunk/Lib/sandbox/arpack/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -1,6 +1,6 @@ #!/usr/bin/env python -import os +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.system_info import get_info, NotFoundError @@ -13,13 +13,13 @@ config = Configuration('arpack', parent_package, top_path) - arpack_sources=[os.path.join('ARPACK','SRC', '*.f')] - arpack_sources.extend([os.path.join('ARPACK','UTIL', '*.f')]) -# arpack_sources.extend([os.path.join('ARPACK','BLAS', '*.f')]) - arpack_sources.extend([os.path.join('ARPACK','LAPACK', '*.f')]) + arpack_sources=[join('ARPACK','SRC', '*.f')] + arpack_sources.extend([join('ARPACK','UTIL', '*.f')]) +# arpack_sources.extend([join('ARPACK','BLAS', '*.f')]) + arpack_sources.extend([join('ARPACK','LAPACK', '*.f')]) config.add_library('arpack', sources=arpack_sources, - include_dirs=[os.path.join('ARPACK', 'SRC')]) + include_dirs=[join('ARPACK', 'SRC')]) config.add_extension('_arpack', Modified: trunk/Lib/sandbox/exmplpackage/setup.py =================================================================== --- trunk/Lib/sandbox/exmplpackage/setup.py 2007-05-15 11:31:51 UTC (rev 3005) +++ trunk/Lib/sandbox/exmplpackage/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -1,5 +1,7 @@ #!/usr/bin/env python +from os.path import join + def configuration(parent_package='',top_path=None): # The following two lines with `return config` constitutes a # minimal contents of configuration(..) that is suitable for pure @@ -15,9 +17,8 @@ # exmplpackage generates source code, that will be processed with f2py def generate_spam_pyf(ext, build_dir): - import os from distutils.dep_util import newer - target = os.path.join(build_dir,'spam.pyf') + target = join(build_dir,'spam.pyf') source = ext.depends[0] if newer(source,target): fin = open(source) Modified: trunk/Lib/sandbox/pysparse/setup.py =================================================================== --- trunk/Lib/sandbox/pysparse/setup.py 2007-05-15 11:31:51 UTC (rev 3005) +++ trunk/Lib/sandbox/pysparse/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -10,20 +10,19 @@ working around PySparse's weird inconsistency in its module name.) 
""" -import os -from numpy.distutils.core import Extension -from numpy.distutils.misc_util import get_path,Configuration,dot_join -join = os.path.join -import glob +from os.path import join +from glob import glob + def configuration(parent_package='',parent_path=None): + from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info config = Configuration('pysparse', parent_package, parent_path) config.add_data_dir('docs') config.add_data_dir('examples') config.add_data_dir('tests') - headers = glob.glob(os.path.join ("include","pysparse","*.h")) + headers = glob(join("include","pysparse","*.h")) config.add_extension('pysparse', sources = ['src/spmatrixmodule.c'], include_dirs = ['include/'] Modified: trunk/Lib/sandbox/spline/setup.py =================================================================== --- trunk/Lib/sandbox/spline/setup.py 2007-05-15 11:31:51 UTC (rev 3005) +++ trunk/Lib/sandbox/spline/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -1,6 +1,6 @@ #!/usr/bin/env python -import os +from os.path import join def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration @@ -8,7 +8,7 @@ config = Configuration('spline', parent_package, top_path) config.add_library('fitpack', - sources=[os.path.join('fitpack', '*.f')], + sources=[join('fitpack', '*.f')], ) config.add_extension('dfitpack', Modified: trunk/Lib/sandbox/xplt/setup.py =================================================================== --- trunk/Lib/sandbox/xplt/setup.py 2007-05-15 11:31:51 UTC (rev 3005) +++ trunk/Lib/sandbox/xplt/setup.py 2007-05-15 11:57:40 UTC (rev 3006) @@ -1,8 +1,7 @@ -## Automatically adapted for scipy Oct 31, 2005 by - #!/usr/bin/env python -import os +from os import environ +from os.path import abspath, dirname, join import sys from distutils import dir_util from distutils.sysconfig import get_python_lib @@ -37,7 +36,7 @@ x11 = 0 if not (windows or cygwin or macosx): x11 = 1 -if 'NO_XLIB' in os.environ: +if 'NO_XLIB' in environ: x11 = 0 @@ -193,15 +192,15 @@ include_dirs = ['src/gist', 'src/play', 'src/play/unix' ] - library_dirs = [os.path.join(local_path,x) for x in ['.','src']] + library_dirs = [join(local_path,x) for x in ['.','src']] library_dirs.extend(x11_info.get('library_dirs',[])) library_dirs.extend(get_special_dirs(sys.platform)) - include_dirs = [os.path.join(local_path,x) for x in include_dirs] + include_dirs = [join(local_path,x) for x in include_dirs] include_dirs.extend(x11_info.get('include_dirs',[])) if 1: - inputfile = open(os.path.join(config_path,"Make.cfg")) + inputfile = open(join(config_path,"Make.cfg")) lines = inputfile.readlines() inputfile.close() for line in lines: @@ -240,11 +239,11 @@ config = Configuration('xplt',parent_package, top_path) local_path = config.local_path - all_playsource = [os.path.join('src','play','*','*.c'), - os.path.join('src','play','*.h') + all_playsource = [join('src','play','*','*.c'), + join('src','play','*.h') ] - gistpath = os.path.join(get_python_lib(1),config.path_in_package,"gistdata") + gistpath = join(get_python_lib(1),config.path_in_package,"gistdata") gistpath = gistpath.replace("\\",r"\\\\") def get_playsource(extension,build_dir): @@ -256,9 +255,9 @@ playsource = unixsource + macsource + allsource else: playsource = unixsource + x11source + allsource - sources = [os.path.join(local_path,n) for n in playsource] + sources = [join(local_path,n) for n in playsource] - config_path = os.path.join(build_dir,'config_pygist') + config_path = 
join(build_dir,'config_pygist') dir_util.mkpath(config_path) conf = config_pygist(local_path,config_path) # Look to see if compiler is set on command line and add it @@ -279,7 +278,7 @@ include_dirs, library_dirs, libraries, \ extra_compile_args, extra_link_args \ = getallparams(gistpath,local_path,config_path) - include_dirs.insert(0,os.path.dirname(conf.config_h)) + include_dirs.insert(0,dirname(conf.config_h)) extension.include_dirs.extend(include_dirs) extension.library_dirs.extend(library_dirs) @@ -291,7 +290,7 @@ - gistC = os.path.join('pygist','gistCmodule.c') + gistC = join('pygist','gistCmodule.c') sources = gistsource sources = [gistC] + sources + [get_playsource] @@ -300,16 +299,16 @@ depends = ['src'] ) config.add_extension('gistfuncs', - [os.path.join('pygist','gistfuncsmodule.c')]) + [join('pygist','gistfuncsmodule.c')]) file_ext = ['*.gs','*.gp', '*.ps', '*.help'] - xplt_files = [os.path.join('gistdata',x) for x in file_ext] - xplt_files += [os.path.join('src','g',x) for x in file_ext] + xplt_files = [join('gistdata',x) for x in file_ext] + xplt_files += [join('src','g',x) for x in file_ext] config.add_data_dir('gistdata') - config.add_data_dir((os.path.join(config.path_in_package,'gistdata'), - os.path.abspath(config.paths('src/g')[0]))) + config.add_data_dir((join(config.path_in_package,'gistdata'), + abspath(config.paths('src/g')[0]))) return config From scipy-svn at scipy.org Tue May 15 15:41:07 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 14:41:07 -0500 (CDT) Subject: [Scipy-svn] r3007 - trunk/Lib/sandbox/numexpr Message-ID: <20070515194107.C901239C041@new.scipy.org> Author: cookedm Date: 2007-05-15 14:41:05 -0500 (Tue, 15 May 2007) New Revision: 3007 Modified: trunk/Lib/sandbox/numexpr/compiler.py trunk/Lib/sandbox/numexpr/expressions.py Log: [numexpr] Simplify using a context for evaluating expressions (also threadsafe for Python >= 2.4) Modified: trunk/Lib/sandbox/numexpr/compiler.py =================================================================== --- trunk/Lib/sandbox/numexpr/compiler.py 2007-05-15 11:57:40 UTC (rev 3006) +++ trunk/Lib/sandbox/numexpr/compiler.py 2007-05-15 19:41:05 UTC (rev 3007) @@ -185,47 +185,29 @@ def __str__(self): return 'Immediate(%d)' % (self.node.value,) -def makeExpressions(context): - """Make private copy of the expressions module with a custom get_context(). - - An attempt was made to make this threadsafe, but I can't guarantee it's - bulletproof. - """ - import sys, imp - modname = __name__[:__name__.rfind('.')] + '.expressions' - # get our own, private copy of expressions - imp.acquire_lock() - try: - old = sys.modules.pop(modname) - import expressions - private = sys.modules.pop(modname) - sys.modules[modname] = old - finally: - imp.release_lock() - def get_context(): - return context - private.get_context = get_context - return private - def stringToExpression(s, types, context): """Given a string, convert it to a tree of ExpressionNode's. 
""" - expr = makeExpressions(context) - # first compile to a code object to determine the names - c = compile(s, '', 'eval') - # make VariableNode's for the names - names = {} - for name in c.co_names: - if name == "None": - names[name] = None - else: - t = types.get(name, float) - names[name] = expr.VariableNode(name, type_to_kind[t]) - names.update(expr.functions) - # now build the expression - ex = eval(c, names) - if expressions.isConstant(ex): - ex = expr.ConstantNode(ex, expressions.getKind(ex)) + old_ctx = expressions._context.ctx + try: + expressions._context.ctx = context + # first compile to a code object to determine the names + c = compile(s, '', 'eval') + # make VariableNode's for the names + names = {} + for name in c.co_names: + if name == "None": + names[name] = None + else: + t = types.get(name, float) + names[name] = expressions.VariableNode(name, type_to_kind[t]) + names.update(expressions.functions) + # now build the expression + ex = eval(c, names) + if expressions.isConstant(ex): + ex = expressions.ConstantNode(ex, expressions.getKind(ex)) + finally: + expressions._context.ctx = old_ctx return ex Modified: trunk/Lib/sandbox/numexpr/expressions.py =================================================================== --- trunk/Lib/sandbox/numexpr/expressions.py 2007-05-15 11:57:40 UTC (rev 3006) +++ trunk/Lib/sandbox/numexpr/expressions.py 2007-05-15 19:41:05 UTC (rev 3007) @@ -1,9 +1,11 @@ __all__ = ['E'] import operator -import numpy import sys +import threading +import numpy + import interpreter class Expression(object): @@ -18,12 +20,16 @@ E = Expression() +try: + _context = threading.local() +except AttributeError: + class Context(object): + pass + _context = Context() +_context.ctx = {} -def get_context(): - """Context used to evaluate expression. Typically overridden in compiler.""" - return {} def get_optimization(): - return get_context().get('optimization', 'none') + return _context.ctx.get('optimization', 'none') # helper functions for creating __magic__ methods def ophelper(f): From scipy-svn at scipy.org Tue May 15 16:56:40 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 15 May 2007 15:56:40 -0500 (CDT) Subject: [Scipy-svn] r3008 - in trunk/Lib/interpolate: . 
tests Message-ID: <20070515205640.90FD339C02F@new.scipy.org> Author: oliphant Date: 2007-05-15 15:56:36 -0500 (Tue, 15 May 2007) New Revision: 3008 Modified: trunk/Lib/interpolate/interpolate.py trunk/Lib/interpolate/tests/test_interpolate.py Log: Fix typo in interpolate.py and add some 'completion-only' tests Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-15 19:41:05 UTC (rev 3007) +++ trunk/Lib/interpolate/interpolate.py 2007-05-15 20:56:36 UTC (rev 3008) @@ -178,14 +178,14 @@ self.fill_value = fill_value - if isinstance(kind, integer): + if isinstance(kind, int): kind = {0:'zero', 1:'slinear', 2:'quadratic', - 3:'cubic'}.get(kind,'bad') + 3:'cubic'}.get(kind,'none') if kind not in ['zero', 'linear', 'slinear', 'quadratic', 'cubic']: - raise NotImplementedError("%d unsupported: Use fitpack "\ + raise NotImplementedError("%d is unsupported: Use fitpack "\ "routines for other types.") x = array(x, copy=self.copy) y = array(y, copy=self.copy) Modified: trunk/Lib/interpolate/tests/test_interpolate.py =================================================================== --- trunk/Lib/interpolate/tests/test_interpolate.py 2007-05-15 19:41:05 UTC (rev 3007) +++ trunk/Lib/interpolate/tests/test_interpolate.py 2007-05-15 20:56:36 UTC (rev 3008) @@ -39,9 +39,16 @@ are given to the constructor. """ - # Only kind='linear' is implemented. - self.assertRaises(NotImplementedError, interp1d, self.x10, self.y10, kind='cubic') + # These should all work. interp1d(self.x10, self.y10, kind='linear') + interp1d(self.x10, self.y10, kind='cubic') + interp1d(self.x10, self.y10, kind='slinear') + interp1d(self.x10, self.y10, kind='quadratic') + interp1d(self.x10, self.y10, kind='zero') + interp1d(self.x10, self.y10, kind=0) + interp1d(self.x10, self.y10, kind=1) + interp1d(self.x10, self.y10, kind=2) + interp1d(self.x10, self.y10, kind=3) # x array must be 1D. 
self.assertRaises(ValueError, interp1d, self.x25, self.y10) From scipy-svn at scipy.org Wed May 16 13:05:55 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 16 May 2007 12:05:55 -0500 (CDT) Subject: [Scipy-svn] r3009 - in trunk/Lib/special: cephes specfun tests Message-ID: <20070516170555.51CBF39C0A3@new.scipy.org> Author: cookedm Date: 2007-05-16 12:05:50 -0500 (Wed, 16 May 2007) New Revision: 3009 Modified: trunk/Lib/special/cephes/polmisc.c trunk/Lib/special/cephes/polyn.c trunk/Lib/special/specfun/specfun.f trunk/Lib/special/tests/test_basic.py Log: Lib/special: clean up specfun.f a bit Modified: trunk/Lib/special/cephes/polmisc.c =================================================================== --- trunk/Lib/special/cephes/polmisc.c 2007-05-15 20:56:36 UTC (rev 3008) +++ trunk/Lib/special/cephes/polmisc.c 2007-05-16 17:05:50 UTC (rev 3009) @@ -4,9 +4,7 @@ */ #include -#ifndef __APPLE__ -#include -#endif +#include #include "mconf.h" #ifndef ANSIPROT double atan2(), sqrt(), fabs(), sin(), cos(); Modified: trunk/Lib/special/cephes/polyn.c =================================================================== --- trunk/Lib/special/cephes/polyn.c 2007-05-15 20:56:36 UTC (rev 3008) +++ trunk/Lib/special/cephes/polyn.c 2007-05-16 17:05:50 UTC (rev 3009) @@ -60,14 +60,8 @@ */ #include -void exit(int); -#ifndef NULL -#define NULL 0 -#endif +#include #include "mconf.h" -#ifndef __APPLE__ -#include -#endif /* near pointer version of malloc() */ /* Modified: trunk/Lib/special/specfun/specfun.f =================================================================== --- trunk/Lib/special/specfun/specfun.f 2007-05-15 20:56:36 UTC (rev 3008) +++ trunk/Lib/special/specfun/specfun.f 2007-05-16 17:05:50 UTC (rev 3009) @@ -699,7 +699,7 @@ DO 10 K=1,NM J=2*K-2+M+IP SU0=SU0+DF(K)*QM(J) - IF (K.GT.NM1.AND.DABS(SU0-SW).LT.DABS(SU0)*EPS) GO TO 15 + IF (K.GT.NM1.AND.DABS(SU0-SW).LT.DABS(SU0)*EPS) GO TO 15 10 SW=SU0 15 SD0=0.0D0 DO 20 K=1,NM @@ -1144,7 +1144,7 @@ IF (K.LE.NM) CSJ(K)=CF CF0=CF1 15 CF1=CF - IF (CDABS(CSA).GT.CDABS(CSB)) CS=CSA/CF + IF (CDABS(CSA).GT.CDABS(CSB)) CS=CSA/CF1 IF (CDABS(CSA).LE.CDABS(CSB)) CS=CSB/CF0 DO 20 K=0,NM 20 CSJ(K)=CS*CSJ(K) @@ -1609,7 +1609,7 @@ IMPLICIT DOUBLE PRECISION (A-H,O-Z) IF (M.LE.12.OR.Q.LE.3.0*M.OR.Q.GT.M*M) THEN CALL CV0(KD,M,Q,A) - IF (Q.NE.0.0D0) CALL REFINE(KD,M,Q,A,1) + IF (Q.NE.0.0D0) CALL REFINE(KD,M,Q,A) ELSE NDIV=10 DELTA=(M-3.0)*M/NDIV @@ -1626,7 +1626,7 @@ A=(A1*Q2-A2*Q1+(A2-A1)*QQ)/(Q2-Q1) IFLAG=1 IF (I.EQ.NN) IFLAG=-1 - CALL REFINE(KD,M,QQ,A,IFLAG) + CALL REFINE(KD,M,QQ,A) Q1=Q2 Q2=QQ A1=A2 @@ -1650,7 +1650,7 @@ A=(A1*Q2-A2*Q1+(A2-A1)*QQ)/(Q2-Q1) IFLAG=1 IF (I.EQ.NN) IFLAG=-1 - CALL REFINE(KD,M,QQ,A,IFLAG) + CALL REFINE(KD,M,QQ,A) Q1=Q2 Q2=QQ A1=A2 @@ -2007,7 +2007,7 @@ C ********************************** - SUBROUTINE REFINE(KD,M,Q,A,IFLAG) + SUBROUTINE REFINE(KD,M,Q,A) C C ===================================================== C Purpose: calculate the accurate characteristic value @@ -2029,7 +2029,7 @@ CALL CVF(KD,M,Q,X0,MJ,F0) X1=1.002*A CALL CVF(KD,M,Q,X1,MJ,F1) -5 DO 10 IT=1,100 + DO 10 IT=1,100 MJ=MJ+1 X=X1-(X1-X0)/(1.0D0-F0/F1) CALL CVF(KD,M,Q,X,MJ,F) @@ -6307,7 +6307,7 @@ 150 ZHF=ZF0+ZF1 ENDIF ENDIF -155 A=AA + A=AA B=BB IF (K.GT.150) WRITE(*,160) 160 FORMAT(1X,'Warning! 
You should check the accuracy') @@ -9750,7 +9750,7 @@ Z=ZERO W=0.0D0 DO 35 NR=1,NT -10 IF (NR.NE.1) Z=ZO(NR-1)-H + IF (NR.NE.1) Z=ZO(NR-1)-H IT=0 15 IT=IT+1 CALL CY01(KF,Z,ZF,ZD) Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-15 20:56:36 UTC (rev 3008) +++ trunk/Lib/special/tests/test_basic.py 2007-05-16 17:05:50 UTC (rev 3009) @@ -193,7 +193,8 @@ cephes.hankel2e(1,1) def check_hyp1f1(self): - cephes.hyp1f1(1,1,1) + assert_approx_equal(cephes.hyp1f1(1,1,1), exp(1.0)) + assert_approx_equal(cephes.hyp1f1(3,4,-6), 0.026056422099537251095) def check_hyp1f2(self): cephes.hyp1f2(1,1,1,1) def check_hyp2f0(self): From scipy-svn at scipy.org Wed May 16 13:12:19 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 16 May 2007 12:12:19 -0500 (CDT) Subject: [Scipy-svn] r3010 - in trunk/Lib/special: cephes tests Message-ID: <20070516171219.A169339C0A3@new.scipy.org> Author: cookedm Date: 2007-05-16 12:12:16 -0500 (Wed, 16 May 2007) New Revision: 3010 Modified: trunk/Lib/special/cephes/polmisc.c trunk/Lib/special/cephes/polyn.c trunk/Lib/special/tests/test_basic.py Log: Revert extra accidental commits in r3009 Modified: trunk/Lib/special/cephes/polmisc.c =================================================================== --- trunk/Lib/special/cephes/polmisc.c 2007-05-16 17:05:50 UTC (rev 3009) +++ trunk/Lib/special/cephes/polmisc.c 2007-05-16 17:12:16 UTC (rev 3010) @@ -4,7 +4,9 @@ */ #include -#include +#ifndef __APPLE__ +#include +#endif #include "mconf.h" #ifndef ANSIPROT double atan2(), sqrt(), fabs(), sin(), cos(); Modified: trunk/Lib/special/cephes/polyn.c =================================================================== --- trunk/Lib/special/cephes/polyn.c 2007-05-16 17:05:50 UTC (rev 3009) +++ trunk/Lib/special/cephes/polyn.c 2007-05-16 17:12:16 UTC (rev 3010) @@ -60,8 +60,14 @@ */ #include -#include +void exit(int); +#ifndef NULL +#define NULL 0 +#endif #include "mconf.h" +#ifndef __APPLE__ +#include +#endif /* near pointer version of malloc() */ /* Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-16 17:05:50 UTC (rev 3009) +++ trunk/Lib/special/tests/test_basic.py 2007-05-16 17:12:16 UTC (rev 3010) @@ -193,8 +193,7 @@ cephes.hankel2e(1,1) def check_hyp1f1(self): - assert_approx_equal(cephes.hyp1f1(1,1,1), exp(1.0)) - assert_approx_equal(cephes.hyp1f1(3,4,-6), 0.026056422099537251095) + cephes.hyp1f1(1,1,1) def check_hyp1f2(self): cephes.hyp1f2(1,1,1,1) def check_hyp2f0(self): From scipy-svn at scipy.org Wed May 16 13:13:25 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 16 May 2007 12:13:25 -0500 (CDT) Subject: [Scipy-svn] r3011 - trunk/Lib/special/cephes Message-ID: <20070516171325.9DF0739C0A3@new.scipy.org> Author: cookedm Date: 2007-05-16 12:13:23 -0500 (Wed, 16 May 2007) New Revision: 3011 Modified: trunk/Lib/special/cephes/polmisc.c trunk/Lib/special/cephes/polyn.c Log: Lib/special: fix up includes Modified: trunk/Lib/special/cephes/polmisc.c =================================================================== --- trunk/Lib/special/cephes/polmisc.c 2007-05-16 17:12:16 UTC (rev 3010) +++ trunk/Lib/special/cephes/polmisc.c 2007-05-16 17:13:23 UTC (rev 3011) @@ -4,9 +4,7 @@ */ #include -#ifndef __APPLE__ -#include -#endif +#include #include "mconf.h" #ifndef ANSIPROT double atan2(), sqrt(), fabs(), sin(), cos(); 
Modified: trunk/Lib/special/cephes/polyn.c =================================================================== --- trunk/Lib/special/cephes/polyn.c 2007-05-16 17:12:16 UTC (rev 3010) +++ trunk/Lib/special/cephes/polyn.c 2007-05-16 17:13:23 UTC (rev 3011) @@ -60,14 +60,8 @@ */ #include -void exit(int); -#ifndef NULL -#define NULL 0 -#endif +#include #include "mconf.h" -#ifndef __APPLE__ -#include -#endif /* near pointer version of malloc() */ /* From scipy-svn at scipy.org Wed May 16 16:01:52 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 16 May 2007 15:01:52 -0500 (CDT) Subject: [Scipy-svn] r3012 - in trunk/Lib/special: cephes tests Message-ID: <20070516200152.6325339C235@new.scipy.org> Author: cookedm Date: 2007-05-16 15:01:46 -0500 (Wed, 16 May 2007) New Revision: 3012 Modified: trunk/Lib/special/cephes/hyperg.c trunk/Lib/special/tests/test_basic.py Log: Lib/special/cephes/hyperg.c: Use Kahan summation in power series sum for hyp1f1, instead of ad-hoc calculation of roundoff/cancellation error. Modified: trunk/Lib/special/cephes/hyperg.c =================================================================== --- trunk/Lib/special/cephes/hyperg.c 2007-05-16 17:13:23 UTC (rev 3011) +++ trunk/Lib/special/cephes/hyperg.c 2007-05-16 20:01:46 UTC (rev 3012) @@ -132,6 +132,7 @@ { double n, a0, sum, t, u, temp; double an, bn, maxt; +double y, c, sumc; /* set up for power series summation */ @@ -139,6 +140,7 @@ bn = b; a0 = 1.0; sum = 1.0; +c = 0.0; n = 1.0; t = 1.0; maxt = 0.0; @@ -167,17 +169,14 @@ } a0 *= u; - sum += a0; + + y = a0 - c; + sumc = sum + y; + c = (sumc - sum) - y; + sum = sumc; + t = fabs(a0); - if( t > maxt ) - maxt = t; -/* - if( (maxt/fabs(sum)) > 1.0e17 ) - { - pcanc = 1.0; - goto blowup; - } -*/ + an += 1.0; bn += 1.0; n += 1.0; @@ -186,10 +185,11 @@ pdone: /* estimate error due to roundoff and cancellation */ -if( sum != 0.0 ) - maxt /= fabs(sum); -maxt *= MACHEP; /* this way avoids multiply overflow */ -*err = fabs( MACHEP * n + maxt ); +if (sum != 0.0) { + *err = fabs(c / sum); +} else { + *err = fabs(c); +} return( sum ); } Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-16 17:13:23 UTC (rev 3011) +++ trunk/Lib/special/tests/test_basic.py 2007-05-16 20:01:46 UTC (rev 3012) @@ -193,6 +193,8 @@ cephes.hankel2e(1,1) def check_hyp1f1(self): + assert_approx_equal(cephes.hyp1f1(1,1,1), exp(1.0)) + assert_approx_equal(cephes.hyp1f1(3,4,-6), 0.026056422099537251095) cephes.hyp1f1(1,1,1) def check_hyp1f2(self): cephes.hyp1f2(1,1,1,1) From scipy-svn at scipy.org Wed May 16 19:12:40 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 16 May 2007 18:12:40 -0500 (CDT) Subject: [Scipy-svn] r3013 - in trunk/Lib/sandbox/maskedarray: . 
tests Message-ID: <20070516231240.C4ADA39C060@new.scipy.org> Author: pierregm Date: 2007-05-16 18:12:32 -0500 (Wed, 16 May 2007) New Revision: 3013 Modified: trunk/Lib/sandbox/maskedarray/core.py trunk/Lib/sandbox/maskedarray/tests/test_core.py Log: maskedarray.core : fixed the setting of the mask in getitem when _smallmask is True Modified: trunk/Lib/sandbox/maskedarray/core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/core.py 2007-05-16 20:01:46 UTC (rev 3012) +++ trunk/Lib/sandbox/maskedarray/core.py 2007-05-16 23:12:32 UTC (rev 3013) @@ -1146,11 +1146,12 @@ self._mask = make_mask_none(self.shape) self._mask[indx] = valmask elif not self._hardmask: - self._mask = self._mask.copy() + _mask = self._mask.copy() if valmask is nomask: - self._mask[indx] = False + _mask[indx] = False else: - self._mask[indx] = valmask + _mask[indx] = valmask + self._set_mask(_mask) elif hasattr(indx, 'dtype') and (indx.dtype==bool_): indx = indx * umath.logical_not(self._mask) else: Modified: trunk/Lib/sandbox/maskedarray/tests/test_core.py =================================================================== --- trunk/Lib/sandbox/maskedarray/tests/test_core.py 2007-05-16 20:01:46 UTC (rev 3012) +++ trunk/Lib/sandbox/maskedarray/tests/test_core.py 2007-05-16 23:12:32 UTC (rev 3013) @@ -1092,6 +1092,19 @@ xh[0:1] = 999 assert_equal(xh._data,[999,1,2,3,4]) + def check_smallmask(self): + "Checks the behaviour of _smallmask" + a = arange(10) + a[1] = masked + a[1] = 1 + assert_equal(a._mask, nomask) + a = arange(10) + a._smallmask = False + a[1] = masked + a[1] = 1 + assert_equal(a._mask, zeros(10)) + + def check_sort(self): "Test sort" x = array([1,4,2,3],mask=[0,1,0,0],dtype=N.uint8) From scipy-svn at scipy.org Thu May 17 03:37:33 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 17 May 2007 02:37:33 -0500 (CDT) Subject: [Scipy-svn] r3014 - trunk/Lib/integrate Message-ID: <20070517073733.62B2639C074@new.scipy.org> Author: oliphant Date: 2007-05-17 02:37:27 -0500 (Thu, 17 May 2007) New Revision: 3014 Modified: trunk/Lib/integrate/quadrature.py Log: Remove print statement in quadrature. Modified: trunk/Lib/integrate/quadrature.py =================================================================== --- trunk/Lib/integrate/quadrature.py 2007-05-16 23:12:32 UTC (rev 3013) +++ trunk/Lib/integrate/quadrature.py 2007-05-17 07:37:27 UTC (rev 3014) @@ -116,8 +116,6 @@ n = n + 1 if n == maxiter: print "maxiter (%d) exceeded. Latest difference = %e" % (n,err) - else: - print "Took %d points." 
% n return val, err def tupleset(t, i, value): From scipy-svn at scipy.org Thu May 17 11:36:03 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 17 May 2007 10:36:03 -0500 (CDT) Subject: [Scipy-svn] r3015 - trunk/Lib/sandbox/timeseries Message-ID: <20070517153603.747F739C016@new.scipy.org> Author: mattknox_ca Date: 2007-05-17 10:35:58 -0500 (Thu, 17 May 2007) New Revision: 3015 Modified: trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tseries.py Log: fixed exception classes so the error string outputs properly Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-17 07:37:27 UTC (rev 3014) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-17 15:35:58 UTC (rev 3015) @@ -54,14 +54,13 @@ #---- --- Date Exceptions --- #####--------------------------------------------------------------------------- class DateError(Exception): - """Defines a generic DateArrayError.""" - def __init__ (self, args=None): - "Create an exception" - Exception.__init__(self) - self.args = args + "Defines a generic DateArrayError." + def __init__ (self, value=None): + "Creates an exception." + self.value = value def __str__(self): - "Calculate the string representation" - return str(self.args) + "Calculates the string representation." + return str(self.value) __repr__ = __str__ class InsufficientDateError(DateError): Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-17 07:37:27 UTC (rev 3014) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-17 15:35:58 UTC (rev 3015) @@ -99,13 +99,12 @@ #### -------------------------------------------------------------------------- class TimeSeriesError(Exception): "Class for TS related errors." - def __init__ (self, args=None): + def __init__ (self, value=None): "Creates an exception." - Exception.__init__(self) - self.args = args + self.value = value def __str__(self): "Calculates the string representation." - return str(self.args) + return str(self.value) __repr__ = __str__ class TimeSeriesCompatibilityError(TimeSeriesError): From scipy-svn at scipy.org Fri May 18 10:17:12 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 18 May 2007 09:17:12 -0500 (CDT) Subject: [Scipy-svn] r3016 - in trunk/Lib/stsci: convolve/src image/src Message-ID: <20070518141712.A180F39C057@new.scipy.org> Author: pearu Date: 2007-05-18 09:16:27 -0500 (Fri, 18 May 2007) New Revision: 3016 Modified: trunk/Lib/stsci/convolve/src/_correlatemodule.c trunk/Lib/stsci/convolve/src/_lineshapemodule.c trunk/Lib/stsci/image/src/_combinemodule.c Log: Fixed few compiler warnings. 
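A minimal sketch of the corrected exception pattern from r3015 above (DemoError is a hypothetical stand-in for DateError / TimeSeriesError; Python 2 syntax to match the tree):

    class DemoError(Exception):
        "Illustrative stand-in for DateError / TimeSeriesError."
        def __init__(self, value=None):
            "Creates an exception, storing the message on self.value."
            self.value = value
        def __str__(self):
            "Returns the stored message, so str(err) prints it as-is."
            return str(self.value)
        __repr__ = __str__

    err = DemoError("not a recognized frequency")
    print str(err)     # -> not a recognized frequency

Keeping the message on a dedicated attribute instead of reassigning self.args leaves the string representation entirely under the class's own control.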
Modified: trunk/Lib/stsci/convolve/src/_correlatemodule.c =================================================================== --- trunk/Lib/stsci/convolve/src/_correlatemodule.c 2007-05-17 15:35:58 UTC (rev 3015) +++ trunk/Lib/stsci/convolve/src/_correlatemodule.c 2007-05-18 14:16:27 UTC (rev 3016) @@ -5,6 +5,7 @@ #include #include +#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" typedef enum @@ -666,12 +667,7 @@ {NULL, NULL} /* Sentinel */ }; -/* platform independent*/ -#ifdef MS_WIN32 -__declspec(dllexport) -#endif - -void init_correlate(void) +PyMODINIT_FUNC init_correlate(void) { PyObject *m, *d; m = Py_InitModule("_correlate", _correlateMethods); Modified: trunk/Lib/stsci/convolve/src/_lineshapemodule.c =================================================================== --- trunk/Lib/stsci/convolve/src/_lineshapemodule.c 2007-05-17 15:35:58 UTC (rev 3015) +++ trunk/Lib/stsci/convolve/src/_lineshapemodule.c 2007-05-18 14:16:27 UTC (rev 3016) @@ -29,6 +29,7 @@ #include +#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" @@ -360,7 +361,7 @@ /*** module initialization ***/ -DL_EXPORT(void) init_lineshape(void) +PyMODINIT_FUNC init_lineshape(void) { PyObject *m, *d; m = Py_InitModule("_lineshape", _lineshape_Methods); Modified: trunk/Lib/stsci/image/src/_combinemodule.c =================================================================== --- trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-17 15:35:58 UTC (rev 3015) +++ trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-18 14:16:27 UTC (rev 3016) @@ -5,6 +5,7 @@ #include #include +#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" #define MAX_ARRAYS 1024 @@ -224,12 +225,7 @@ {NULL, NULL} /* Sentinel */ }; -/* platform independent*/ -#ifdef MS_WIN32 -__declspec(dllexport) -#endif - -void init_combine(void) +PyMODINIT_FUNC init_combine(void) { PyObject *m, *d; m = Py_InitModule("_combine", _combineMethods); From scipy-svn at scipy.org Fri May 18 18:45:20 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 18 May 2007 17:45:20 -0500 (CDT) Subject: [Scipy-svn] r3017 - in trunk/Lib/sandbox/timeseries: . plotlib tests Message-ID: <20070518224520.82E31C7C034@new.scipy.org> Author: pierregm Date: 2007-05-18 17:45:16 -0500 (Fri, 18 May 2007) New Revision: 3017 Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tests/test_timeseries.py trunk/Lib/sandbox/timeseries/tseries.py Log: mpl_timeseries : add self.format_dateaxis() to tsplot tdates : fixed the update of the cachedinfo in DateArray.__getitem__ tdates : added mehotd DateArray.__getslice__ tseries : added function align_with tseries : forced the dates to an empty DateArray in TimeSeries.__array_finalize__ tseries : cleaned unused variables Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-18 14:16:27 UTC (rev 3016) +++ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-18 22:45:16 UTC (rev 3017) @@ -719,6 +719,7 @@ parms = self._check_plot_params(*parms) self.legendlabels.append(kwargs.get('label',None)) Subplot.plot(self, *parms,**kwargs) + self.format_dateaxis() #............................................ 
def format_dateaxis(self,maj_spacing=None, min_spacing=None, strformat="%Y", rotate=True): Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-18 14:16:27 UTC (rev 3016) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-18 22:45:16 UTC (rev 3017) @@ -213,8 +213,10 @@ return def __getitem__(self, indx): + reset_full = True if isinstance(indx, Date): indx = self.find_dates(indx) + reset_full = False elif numeric.asarray(indx).dtype.kind == 'O': try: indx = self.find_dates(indx) @@ -231,12 +233,27 @@ return Date(self.freq, value=r.item()) else: if hasattr(r, '_cachedinfo'): - r._cachedinfo.update(dict(steps=None, full=None, hasdups=None)) - for attr in ('tostr','toobj','toord'): - if r._cachedinfo[attr] is not None: - r._cachedinfo[attr] = r._cachedinfo[attr][indx] + _cache = r._cachedinfo + _cache.update(dict([(k,_cache[k][indx]) + for k in ('toobj', 'tostr', 'toord') + if _cache[k] is not None])) + _cache['steps'] = None + if reset_full: + _cache['full'] = None + _cache['hasdups'] = None + return r + def __getslice__(self, i, j): + r = ndarray.__getslice__(self, i, j) + if hasattr(r, '_cachedinfo'): + _cache = r._cachedinfo + _cache.update(dict([(k,_cache[k][i:j]) + for k in ('toobj', 'tostr', 'toord') + if _cache[k] is not None])) + _cache['steps'] = None + return r + def __repr__(self): return ndarray.__repr__(self)[:-1] + \ ",\n freq='%s')" % self.freqstr @@ -676,7 +693,6 @@ hodie = today('D') D = DateArray(today('D')) assert_equal(D.freq, 6000) - if 0: freqs = [x[0] for x in corelib.freq_dict.values() if x[0] != 'U'] print freqs @@ -684,18 +700,20 @@ print f today = thisday(f) assert(Date(freq=f, value=today.value) == today) - - if 1: + if 0: D = date_array(freq='U', start_date=Date('U',1), length=10) - - if 1: + if 0: dlist = ['2007-01-%02i' % i for i in (1,2,4,5,7,8,10,11,13)] - - ords = numpy.fromiter((DateTimeFromString(s).toordinal() for s in dlist), float_) - - if 1: + if 0: "Tests the automatic sorting of dates." 
D = date_array_fromlist(dlist=['2006-01','2005-01','2004-01'],freq='M') assert_equal(D.view(ndarray), [24037, 24049, 24061]) + + if 1: + dlist = ['2007-%02i' % i for i in range(1,5)+range(7,13)] + mdates = date_array_fromlist(dlist, 'M') + + print mdates.tostr() + \ No newline at end of file Modified: trunk/Lib/sandbox/timeseries/tests/test_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-18 14:16:27 UTC (rev 3016) +++ trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-18 22:45:16 UTC (rev 3017) @@ -346,11 +346,23 @@ end_date=Date('D', string='2007-01-31')) assert_equal(dseries.size, 26) assert_equal(dseries._mask, N.r_[series._mask[5:], [1]*16]) + # + def test_alignseries(self): + "Tests align_series & align_with" + (series, data, dates) = self.d # empty_series = time_series([], freq='d') a, b = align_series(series, empty_series) assert_equal(a.start_date, b.start_date) assert_equal(a.end_date, b.end_date) + # + aseries = time_series(data, dates+10) + bseries = time_series(data, dates-10) + (a, b) = align_with(series, aseries, bseries) + assert_equal(a._dates, series._dates) + assert_equal(b._dates, series._dates) + assert_equal(a[-5:], series[:5]) + assert_equal(b[:5], series[-5:]) # def test_tshift(self): "Test tshift function" Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-18 14:16:27 UTC (rev 3016) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-18 22:45:16 UTC (rev 3017) @@ -366,7 +366,7 @@ #............................................ def __array_finalize__(self,obj): MaskedArray.__array_finalize__(self, obj) - self._dates = getattr(obj, '_dates', []) + self._dates = getattr(obj, '_dates', DateArray([])) self.observed = getattr(obj, 'observed', None) return #.................................. @@ -428,7 +428,6 @@ (sindx, dindx) = self.__checkindex(indx) newdata = numeric.array(self._series[sindx], copy=False, subok=True) newdate = self._dates[dindx] - m = self._mask singlepoint = (len(numeric.shape(newdate))==0) if singlepoint: newdate = DateArray(newdate) @@ -475,8 +474,7 @@ """ if self is masked: raise MAError, 'Cannot alter the masked element.' - (sindx, dindx) = self.__checkindex(indx) - #.... + (sindx, _) = self.__checkindex(indx) super(TimeSeries, self).__setitem__(sindx, value) #........................ def __getslice__(self, i, j): @@ -1163,7 +1161,7 @@ newseries[start_date:end_date] = a[start_date:end_date] newseries.copy_attributes(a) return newseries -#.................................................................... +#..................................................... def align_series(*series, **kwargs): """Aligns several TimeSeries, so that their starting and ending dates match. Series are resized and filled with mased values accordingly. @@ -1196,6 +1194,19 @@ return [adjust_endpoints(x, start_date, end_date) for x in series] aligned = align_series + +#..................................................... +def align_with(*series): + """Aligns several TimeSeries to the first of the list, so that their + starting and ending dates match. + Series are resized and filled with mased values accordingly. + """ + if len(series) < 2: + return series + dates = series[0]._dates[[0,-1]] + return [adjust_endpoints(x, dates[0], dates[-1]) for x in series[1:]] + + #.................................................................... 
def _convert1d(series, freq, func='auto', position='END'): """Converts a series to a frequency. Private function called by convert @@ -1538,56 +1549,17 @@ ################################################################################ if __name__ == '__main__': from maskedarray.testutils import assert_equal, assert_array_equal - import numpy as N - if 1: - dlist = ['2007-01-%02i' % i for i in range(1,11)] + dlist = ['2007-01-%02i' % i for i in range(1,16)] dates = date_array_fromlist(dlist) - data = masked_array(numeric.arange(10), mask=[1,0,0,0,0]*2, dtype=float_) + data = masked_array(numeric.arange(15), mask=[1,0,0,0,0]*3) + series = time_series(data, dlist) + # + aseries = time_series(data, dates+10) + bseries = time_series(data, dates-10) + (a, b) = align_with(series, aseries, bseries) + assert_equal(a._dates, series._dates) + assert_equal(b._dates, series._dates) + assert_equal(a[-5:], series[:5]) + assert_equal(b[:5], series[-5:]) - if 0: - ser1d = time_series(data, dlist) - - serfolded = ser1d.reshape((5,2)) - assert_equal(serfolded._dates.shape, (5,2)) - assert_equal(serfolded[0], time_series([0,1],mask=[1,0], - start_date=dates[0])) - assert_equal(serfolded[:,0], - time_series(ser1d[::2], dates=dates[::2])) - sertrans = serfolded.transpose() - assert_equal(sertrans.shape, (2,5)) - - if 1: - data = dates - series = time_series(data, dates) - assert(isinstance(series, TimeSeries)) - assert_equal(series._dates, dates) - assert_equal(series._data, data) - assert_equal(series.freqstr, 'D') - - series[5] = MA.masked - - # ensure that series can be represented by a string after masking a value - # (there was a bug before that prevented this from working when using a - # DateArray for the data) - strrep = str(series) - - if 0: - series = time_series(numpy.arange(1,501), - start_date=Date('D', string='2007-01-01')) - mseries = convert(series, 'M') - aseries = convert(mseries, 'A') - (freq, func, position) = ('A', None, 'END') - - tmp = mseries[:,0].convert('A') - aseries = MA.concatenate([_convert1d(m,'A')._series for m in mseries.split()], - axis=-1).view(type(series)) - aseries._dates = tmp._dates - shp = aseries.shape - aseries.shape = (shp[0], shp[-1]//tmp.shape[-1], tmp.shape[-1]) - numpy.swapaxes(aseries,1,2) - - if 1: - series = time_series(N.arange(124).reshape(62,2), - start_date=Date(freq='d', year=2005, month=7, day=1)) - assert_equal(series.convert('M',sum), [[930,961],[2852,2883]]) \ No newline at end of file From scipy-svn at scipy.org Sat May 19 14:06:13 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 19 May 2007 13:06:13 -0500 (CDT) Subject: [Scipy-svn] r3018 - in trunk/Lib/stsci: convolve/src image/src Message-ID: <20070519180613.9029239C11D@new.scipy.org> Author: pearu Date: 2007-05-19 13:06:05 -0500 (Sat, 19 May 2007) New Revision: 3018 Modified: trunk/Lib/stsci/convolve/src/_correlatemodule.c trunk/Lib/stsci/convolve/src/_lineshapemodule.c trunk/Lib/stsci/image/src/_combinemodule.c Log: Removed NO_IMPORT_ARRAY defines to fix unknown symbol errors with MSVC compiler. 
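A small usage sketch of the align_with helper added in r3017 above, mirroring the new test case (the import paths are assumptions; the functions live in the sandbox timeseries package):

    import numpy
    from timeseries.tseries import time_series, align_with        # assumed import path
    from timeseries.tdates import date_array_fromlist             # assumed import path

    dlist = ['2007-01-%02i' % i for i in range(1, 16)]
    dates = date_array_fromlist(dlist)
    data = numpy.arange(15)

    series  = time_series(data, dates)
    aseries = time_series(data, dates + 10)    # starts ten days later
    bseries = time_series(data, dates - 10)    # starts ten days earlier

    # align_with resizes every trailing series to the date range of the first
    # argument, masking whatever falls outside the original data
    (a, b) = align_with(series, aseries, bseries)
    # a._dates and b._dates now equal series._dates;
    # a[-5:] matches series[:5] and b[:5] matches series[-5:]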
Modified: trunk/Lib/stsci/convolve/src/_correlatemodule.c =================================================================== --- trunk/Lib/stsci/convolve/src/_correlatemodule.c 2007-05-18 22:45:16 UTC (rev 3017) +++ trunk/Lib/stsci/convolve/src/_correlatemodule.c 2007-05-19 18:06:05 UTC (rev 3018) @@ -5,7 +5,6 @@ #include #include -#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" typedef enum Modified: trunk/Lib/stsci/convolve/src/_lineshapemodule.c =================================================================== --- trunk/Lib/stsci/convolve/src/_lineshapemodule.c 2007-05-18 22:45:16 UTC (rev 3017) +++ trunk/Lib/stsci/convolve/src/_lineshapemodule.c 2007-05-19 18:06:05 UTC (rev 3018) @@ -29,7 +29,6 @@ #include -#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" Modified: trunk/Lib/stsci/image/src/_combinemodule.c =================================================================== --- trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-18 22:45:16 UTC (rev 3017) +++ trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-19 18:06:05 UTC (rev 3018) @@ -5,7 +5,6 @@ #include #include -#define NO_IMPORT_ARRAY #include "numpy/libnumarray.h" #define MAX_ARRAYS 1024 From scipy-svn at scipy.org Sat May 19 18:04:29 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 19 May 2007 17:04:29 -0500 (CDT) Subject: [Scipy-svn] r3019 - trunk/Lib/io/tests Message-ID: <20070519220429.737EA39C0AD@new.scipy.org> Author: pearu Date: 2007-05-19 17:04:23 -0500 (Sat, 19 May 2007) New Revision: 3019 Modified: trunk/Lib/io/tests/test_npfile.py Log: Fix io.npfile test for win32 platform. Modified: trunk/Lib/io/tests/test_npfile.py =================================================================== --- trunk/Lib/io/tests/test_npfile.py 2007-05-19 18:06:05 UTC (rev 3018) +++ trunk/Lib/io/tests/test_npfile.py 2007-05-19 22:04:23 UTC (rev 3019) @@ -12,6 +12,7 @@ def test_init(self): fd, fname = mkstemp() + os.close(fd) npf = npfile(fname) arr = N.reshape(N.arange(10), (5,2)) self.assertRaises(IOError, npf.write_array, arr) From scipy-svn at scipy.org Sun May 20 16:41:14 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 20 May 2007 15:41:14 -0500 (CDT) Subject: [Scipy-svn] r3020 - trunk/Lib/fftpack/src Message-ID: <20070520204114.6C42439C0B0@new.scipy.org> Author: cookedm Date: 2007-05-20 15:41:10 -0500 (Sun, 20 May 2007) New Revision: 3020 Added: trunk/Lib/fftpack/src/zfft_djbfft.c trunk/Lib/fftpack/src/zfft_fftpack.c trunk/Lib/fftpack/src/zfft_fftw.c trunk/Lib/fftpack/src/zfft_fftw3.c trunk/Lib/fftpack/src/zfft_mkl.c Modified: trunk/Lib/fftpack/src/zfft.c Log: #408: v2 of David Cournapeau's patch to clean up zfft Modified: trunk/Lib/fftpack/src/zfft.c =================================================================== --- trunk/Lib/fftpack/src/zfft.c 2007-05-19 22:04:23 UTC (rev 3019) +++ trunk/Lib/fftpack/src/zfft.c 2007-05-20 20:41:10 UTC (rev 3020) @@ -6,267 +6,71 @@ #include "fftpack.h" -/**************** FFTWORK *****************************/ +/* The following macro convert private backend specific function to the public + * functions exported by the module */ +#define GEN_PUBLIC_API(name) \ +void destroy_zfft_cache(void)\ +{\ + destroy_z##name##_caches();\ +}\ +\ +void zfft(complex_double *inout, int n, \ + int direction, int howmany, int normalize)\ +{\ + zfft_##name(inout, n, direction, howmany, normalize);\ +} -#ifdef WITH_FFTWORK -GEN_CACHE(zfftwork,(int n) - ,coef_dbl* coef; - ,caches_zfftwork[i].n==n - ,caches_zfftwork[id].coef = 
(coef_dbl*)malloc(sizeof(coef_dbl)*(n)); - fft_coef_dbl(caches_zfftwork[id].coef,n); - ,free(caches_zfftwork[id].coef); - ,10) -#endif +/* ************** Definition of backend specific functions ********* */ -/**************** DJBFFT *****************************/ -#ifndef WITH_MKL -#ifdef WITH_DJBFFT -GEN_CACHE(zdjbfft,(int n) - ,unsigned int* f; - double* ptr; - ,caches_zdjbfft[i].n==n - ,caches_zdjbfft[id].f = (unsigned int*)malloc(sizeof(unsigned int)*(n)); - caches_zdjbfft[id].ptr = (double*)malloc(sizeof(double)*(2*n)); - fftfreq_ctable(caches_zdjbfft[id].f,n); - for(i=0;i0?FFTW_FORWARD:FFTW_BACKWARD), - FFTW_ESTIMATE); - ,fftw_destroy_plan(caches_zfftw[id].plan); - fftw_free(caches_zfftw[id].ptr); - ,10) - -#elif defined WITH_FFTW -/**************** FFTW2 *****************************/ -GEN_CACHE(zfftw,(int n,int d) - ,int direction; - fftw_plan plan; - ,((caches_zfftw[i].n==n) && - (caches_zfftw[i].direction==d)) - ,caches_zfftw[id].direction = d; - caches_zfftw[id].plan = fftw_create_plan(n, - (d>0?FFTW_FORWARD:FFTW_BACKWARD), - FFTW_IN_PLACE|FFTW_ESTIMATE); - ,fftw_destroy_plan(caches_zfftw[id].plan); - ,10) -#else -/**************** FFTPACK ZFFT **********************/ -extern void F_FUNC(zfftf,ZFFTF)(int*,double*,double*); -extern void F_FUNC(zfftb,ZFFTB)(int*,double*,double*); -extern void F_FUNC(zffti,ZFFTI)(int*,double*); -GEN_CACHE(zfftpack,(int n) - ,double* wsave; - ,(caches_zfftpack[i].n==n) - ,caches_zfftpack[id].wsave = (double*)malloc(sizeof(double)*(4*n+15)); - F_FUNC(zffti,ZFFTI)(&n,caches_zfftpack[id].wsave); - ,free(caches_zfftpack[id].wsave); - ,10) -#endif - -extern void destroy_zfft_cache(void) { -#ifdef WITH_FFTWORK - destroy_zfftwork_caches(); -#endif -#ifndef WITH_MKL -#ifdef WITH_DJBFFT - destroy_zdjbfft_caches(); -#endif -#endif -#ifdef WITH_MKL - destroy_zmklfft_caches(); -#elif defined WITH_FFTW3 - destroy_zfftw_caches(); -#elif defined WITH_FFTW - destroy_zfftw_caches(); -#else - destroy_zfftpack_caches(); -#endif -} - -/**************** ZFFT function **********************/ -extern void zfft(complex_double *inout, - int n,int direction,int howmany,int normalize) { - int i; - complex_double *ptr = inout; -#ifndef WITH_MKL #ifdef WITH_FFTW3 - fftw_complex *ptrm = NULL; -#endif -#if defined(WITH_FFTW) || defined(WITH_FFTW3) - fftw_plan plan = NULL; -#endif -#endif -#if defined WITH_MKL - DFTI_DESCRIPTOR_HANDLE desc_handle; -#else - double* wsave = NULL; -#endif -#ifdef WITH_FFTWORK - coef_dbl* coef = NULL; -#endif -#ifndef WITH_MKL -#ifdef WITH_DJBFFT - int j; - complex_double *ptrc = NULL; - unsigned int *f = NULL; -#endif -#endif -#ifdef WITH_FFTWORK - if (ispow2le2e30(n)) { - i = get_cache_id_zfftwork(n); - coef = caches_zfftwork[i].coef; - } else -#endif -#ifndef WITH_MKL -#ifdef WITH_DJBFFT - switch (n) { - case 2:;case 4:;case 8:;case 16:;case 32:;case 64:;case 128:;case 256:; - case 512:;case 1024:;case 2048:;case 4096:;case 8192: - i = get_cache_id_zdjbfft(n); - f = caches_zdjbfft[i].f; - ptrc = (complex_double*)caches_zdjbfft[i].ptr; - } - if (f==0) -#endif -#endif -#ifdef WITH_MKL - desc_handle = caches_zmklfft[get_cache_id_zmklfft(n)].desc_handle; -#elif defined WITH_FFTW3 - plan = caches_zfftw[get_cache_id_zfftw(n,direction)].plan; + #include "zfft_fftw3.c" + #ifndef WITH_DJBFFT + GEN_PUBLIC_API(fftw3) + #endif #elif defined WITH_FFTW - plan = caches_zfftw[get_cache_id_zfftw(n,direction)].plan; -#else - wsave = caches_zfftpack[get_cache_id_zfftpack(n)].wsave; + #include "zfft_fftw.c" + #ifndef WITH_DJBFFT + GEN_PUBLIC_API(fftw) + #endif +#elif 
defined WITH_MKL + #include "zfft_mkl.c" + #ifndef WITH_DJBFFT + GEN_PUBLIC_API(mkl) + #endif +#else /* Use fftpack by default */ + #include "zfft_fftpack.c" + #ifndef WITH_DJBFFT + GEN_PUBLIC_API(fftpack) + #endif #endif - switch (direction) { - - case 1: - for (i=0;i=0;--i) { - *((double*)(ptr)) /= n; - *((double*)(ptr++)+1) /= n; - } - } -} Added: trunk/Lib/fftpack/src/zfft_djbfft.c =================================================================== --- trunk/Lib/fftpack/src/zfft_djbfft.c 2007-05-19 22:04:23 UTC (rev 3019) +++ trunk/Lib/fftpack/src/zfft_djbfft.c 2007-05-20 20:41:10 UTC (rev 3020) @@ -0,0 +1,151 @@ +/* +* DJBFFT only implements size 2^N ! +* +* zfft_def and zfft_def_destroy_cache are the functions +* used for size different than 2^N +*/ +#ifdef WITH_FFTWORK +#define zfft_def zfft_fftwork +#define zfft_def_destroy_cache destroy_zfftwork_cache +#elif defined WITH_FFTW3 +#define zfft_def zfft_fftw3 +#define zfft_def_destroy_cache destroy_zfftw3_caches +#elif defined WITH_FFTW +#define zfft_def zfft_fftw +#define zfft_def_destroy_cache destroy_zfftw_caches +#else +#define zfft_def zfft_fftpack +#define zfft_def_destroy_cache destroy_zfftpack_caches +#endif + +GEN_CACHE(zdjbfft,(int n) + ,unsigned int* f; + double* ptr; + ,caches_zdjbfft[i].n==n + ,caches_zdjbfft[id].f = (unsigned int*)malloc(sizeof(unsigned int)*(n)); + caches_zdjbfft[id].ptr = (double*)malloc(sizeof(double)*(2*n)); + fftfreq_ctable(caches_zdjbfft[id].f,n); + for(i=0;i= 0; --i) { + *((double *) (ptr)) /= n; + *((double *) (ptr++) + 1) /= n; + } + } +} Added: trunk/Lib/fftpack/src/zfft_fftw.c =================================================================== --- trunk/Lib/fftpack/src/zfft_fftw.c 2007-05-19 22:04:23 UTC (rev 3019) +++ trunk/Lib/fftpack/src/zfft_fftw.c 2007-05-20 20:41:10 UTC (rev 3020) @@ -0,0 +1,43 @@ +GEN_CACHE(zfftw,(int n,int d) + ,int direction; + fftw_plan plan; + ,((caches_zfftw[i].n==n) && + (caches_zfftw[i].direction==d)) + ,caches_zfftw[id].direction = d; + caches_zfftw[id].plan = fftw_create_plan(n, + (d>0?FFTW_FORWARD:FFTW_BACKWARD), + FFTW_IN_PLACE|FFTW_ESTIMATE); + ,fftw_destroy_plan(caches_zfftw[id].plan); + ,10) + +extern void zfft_fftw(complex_double * inout, int n, + int dir, int howmany, int normalize) +{ + int i; + complex_double *ptr = inout; + fftw_plan plan = NULL; + plan = caches_zfftw[get_cache_id_zfftw(n, dir)].plan; + + switch (dir) { + case 1: + for (i = 0; i < howmany; ++i, ptr += n) { + fftw_one(plan, (fftw_complex *) ptr, NULL); + } + break; + case -1: + for (i = 0; i < howmany; ++i, ptr += n) { + fftw_one(plan, (fftw_complex *) ptr, NULL); + } + break; + default: + fprintf(stderr, "zfft: invalid dir=%d\n", dir); + } + + if (normalize) { + ptr = inout; + for (i = n * howmany - 1; i >= 0; --i) { + *((double *) (ptr)) /= n; + *((double *) (ptr++) + 1) /= n; + } + } +} Added: trunk/Lib/fftpack/src/zfft_fftw3.c =================================================================== --- trunk/Lib/fftpack/src/zfft_fftw3.c 2007-05-19 22:04:23 UTC (rev 3019) +++ trunk/Lib/fftpack/src/zfft_fftw3.c 2007-05-20 20:41:10 UTC (rev 3020) @@ -0,0 +1,60 @@ +GEN_CACHE(zfftw3,(int n,int d) + ,int direction; + fftw_plan plan; + fftw_complex* ptr; + ,((caches_zfftw3[i].n==n) && + (caches_zfftw3[i].direction==d)) + ,caches_zfftw3[id].direction = d; + caches_zfftw3[id].ptr = fftw_malloc(sizeof(fftw_complex)*(n)); + caches_zfftw3[id].plan = fftw_plan_dft_1d(n, caches_zfftw3[id].ptr, + caches_zfftw3[id].ptr, + (d>0?FFTW_FORWARD:FFTW_BACKWARD), + FFTW_ESTIMATE); + 
,fftw_destroy_plan(caches_zfftw3[id].plan); + fftw_free(caches_zfftw3[id].ptr); + ,10) + +static void zfft_fftw3(complex_double * inout, int n, int dir, int +howmany, int normalize) +{ + complex_double *ptr = inout; + fftw_complex *ptrm = NULL; + fftw_plan plan = NULL; + + int i; + + plan = caches_zfftw3[get_cache_id_zfftw3(n, dir)].plan; + + switch (dir) { + case 1: + for (i = 0; i < howmany; ++i, ptr += n) { + ptrm = + caches_zfftw3[get_cache_id_zfftw3(n, dir)].ptr; + memcpy(ptrm, ptr, sizeof(double) * 2 * n); + fftw_execute(plan); + memcpy(ptr, ptrm, sizeof(double) * 2 * n); + } + break; + + case -1: + for (i = 0; i < howmany; ++i, ptr += n) { + ptrm = + caches_zfftw3[get_cache_id_zfftw3(n, dir)].ptr; + memcpy(ptrm, ptr, sizeof(double) * 2 * n); + fftw_execute(plan); + memcpy(ptr, ptrm, sizeof(double) * 2 * n); + } + break; + + default: + fprintf(stderr, "zfft: invalid dir=%d\n", dir); + } + + if (normalize) { + ptr = inout; + for (i = n * howmany - 1; i >= 0; --i) { + *((double *) (ptr)) /= n; + *((double *) (ptr++) + 1) /= n; + } + } +} Added: trunk/Lib/fftpack/src/zfft_mkl.c =================================================================== --- trunk/Lib/fftpack/src/zfft_mkl.c 2007-05-19 22:04:23 UTC (rev 3019) +++ trunk/Lib/fftpack/src/zfft_mkl.c 2007-05-20 20:41:10 UTC (rev 3020) @@ -0,0 +1,42 @@ +GEN_CACHE(zmklfft,(int n) + ,DFTI_DESCRIPTOR_HANDLE desc_handle; + ,(caches_zmklfft[i].n==n) + ,DftiCreateDescriptor(&caches_zmklfft[id].desc_handle, DFTI_DOUBLE, DFTI_COMPLEX, 1, (long)n); + DftiCommitDescriptor(caches_zmklfft[id].desc_handle); + ,DftiFreeDescriptor(&caches_zmklfft[id].desc_handle); + ,10) + +static void zfft_mkl(complex_double * inout, + int n, int direction, int howmany, int normalize) +{ + int i; + complex_double *ptr = inout; + DFTI_DESCRIPTOR_HANDLE desc_handle; + desc_handle = caches_zmklfft[get_cache_id_zmklfft(n)].desc_handle; + + switch (direction) { + + case 1: + for (i = 0; i < howmany; ++i, ptr += n) { + DftiComputeForward(desc_handle, (double *) ptr); + } + break; + + case -1: + for (i = 0; i < howmany; ++i, ptr += n) { + DftiComputeBackward(desc_handle, (double *) ptr); + } + break; + + default: + fprintf(stderr, "zfft: invalid direction=%d\n", direction); + } + + if (normalize) { + ptr = inout; + for (i = n * howmany - 1; i >= 0; --i) { + *((double *) (ptr)) /= n; + *((double *) (ptr++) + 1) /= n; + } + } +} From scipy-svn at scipy.org Mon May 21 05:12:27 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 04:12:27 -0500 (CDT) Subject: [Scipy-svn] r3021 - trunk/Lib/signal Message-ID: <20070521091227.A55CB39C09D@new.scipy.org> Author: stefan Date: 2007-05-21 04:12:07 -0500 (Mon, 21 May 2007) New Revision: 3021 Modified: trunk/Lib/signal/filter_design.py Log: Default value a=1 for freqz. Modified: trunk/Lib/signal/filter_design.py =================================================================== --- trunk/Lib/signal/filter_design.py 2007-05-20 20:41:10 UTC (rev 3020) +++ trunk/Lib/signal/filter_design.py 2007-05-21 09:12:07 UTC (rev 3021) @@ -67,7 +67,7 @@ plot(w, h) return w, h -def freqz(b, a, worN=None, whole=0, plot=None): +def freqz(b, a=1, worN=None, whole=0, plot=None): """Compute frequency response of a digital filter. Description: @@ -95,6 +95,7 @@ w -- The frequencies at which h was computed. h -- The frequency response. 
+ """ b, a = map(atleast_1d, (b,a)) if whole: From scipy-svn at scipy.org Mon May 21 10:09:54 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 09:09:54 -0500 (CDT) Subject: [Scipy-svn] r3022 - trunk/Lib/fftpack/src Message-ID: <20070521140954.BD8D539C00C@new.scipy.org> Author: pearu Date: 2007-05-21 09:09:45 -0500 (Mon, 21 May 2007) New Revision: 3022 Modified: trunk/Lib/fftpack/src/zfft.c Log: Fix typo. Modified: trunk/Lib/fftpack/src/zfft.c =================================================================== --- trunk/Lib/fftpack/src/zfft.c 2007-05-21 09:12:07 UTC (rev 3021) +++ trunk/Lib/fftpack/src/zfft.c 2007-05-21 14:09:45 UTC (rev 3022) @@ -48,7 +48,7 @@ #elif defined WITH_MKL #include "zfft_mkl.c" #ifndef WITH_DJBFFT - GEN_PUBLIC_API(mkl) + GEN_PUBLIC_API(mklfft) #endif #else /* Use fftpack by default */ #include "zfft_fftpack.c" From scipy-svn at scipy.org Mon May 21 10:15:11 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 09:15:11 -0500 (CDT) Subject: [Scipy-svn] r3023 - trunk/Lib/fftpack/src Message-ID: <20070521141511.4735639C04C@new.scipy.org> Author: pearu Date: 2007-05-21 09:15:05 -0500 (Mon, 21 May 2007) New Revision: 3023 Modified: trunk/Lib/fftpack/src/zfft.c trunk/Lib/fftpack/src/zfft_mkl.c Log: Resolved previous typo fix differently. Modified: trunk/Lib/fftpack/src/zfft.c =================================================================== --- trunk/Lib/fftpack/src/zfft.c 2007-05-21 14:09:45 UTC (rev 3022) +++ trunk/Lib/fftpack/src/zfft.c 2007-05-21 14:15:05 UTC (rev 3023) @@ -48,7 +48,7 @@ #elif defined WITH_MKL #include "zfft_mkl.c" #ifndef WITH_DJBFFT - GEN_PUBLIC_API(mklfft) + GEN_PUBLIC_API(mkl) #endif #else /* Use fftpack by default */ #include "zfft_fftpack.c" Modified: trunk/Lib/fftpack/src/zfft_mkl.c =================================================================== --- trunk/Lib/fftpack/src/zfft_mkl.c 2007-05-21 14:09:45 UTC (rev 3022) +++ trunk/Lib/fftpack/src/zfft_mkl.c 2007-05-21 14:15:05 UTC (rev 3023) @@ -1,9 +1,9 @@ -GEN_CACHE(zmklfft,(int n) +GEN_CACHE(zmkl,(int n) ,DFTI_DESCRIPTOR_HANDLE desc_handle; - ,(caches_zmklfft[i].n==n) - ,DftiCreateDescriptor(&caches_zmklfft[id].desc_handle, DFTI_DOUBLE, DFTI_COMPLEX, 1, (long)n); - DftiCommitDescriptor(caches_zmklfft[id].desc_handle); - ,DftiFreeDescriptor(&caches_zmklfft[id].desc_handle); + ,(caches_zmkl[i].n==n) + ,DftiCreateDescriptor(&caches_zmkl[id].desc_handle, DFTI_DOUBLE, DFTI_COMPLEX, 1, (long)n); + DftiCommitDescriptor(caches_zmkl[id].desc_handle); + ,DftiFreeDescriptor(&caches_zmkl[id].desc_handle); ,10) static void zfft_mkl(complex_double * inout, @@ -12,7 +12,7 @@ int i; complex_double *ptr = inout; DFTI_DESCRIPTOR_HANDLE desc_handle; - desc_handle = caches_zmklfft[get_cache_id_zmklfft(n)].desc_handle; + desc_handle = caches_zmkl[get_cache_id_zmkl(n)].desc_handle; switch (direction) { From scipy-svn at scipy.org Mon May 21 14:35:57 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 13:35:57 -0500 (CDT) Subject: [Scipy-svn] r3024 - trunk/Lib/sandbox/timeseries Message-ID: <20070521183557.7B6EE39C02F@new.scipy.org> Author: pierregm Date: 2007-05-21 13:35:53 -0500 (Mon, 21 May 2007) New Revision: 3024 Modified: trunk/Lib/sandbox/timeseries/tseries.py Log: tseries : forced a copy_attributes in _tsarraymethods (in order to save the attributes) Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 
2007-05-21 14:15:05 UTC (rev 3023) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-21 18:35:53 UTC (rev 3024) @@ -268,6 +268,7 @@ result._dates = getattr(instance._dates, _name)(*args) else: result._dates = instance._dates + result.copy_attributes(instance) return result class _tsaxismethod(object): From scipy-svn at scipy.org Mon May 21 20:10:01 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 19:10:01 -0500 (CDT) Subject: [Scipy-svn] r3025 - trunk/Lib/fftpack Message-ID: <20070522001001.15A4039C03B@new.scipy.org> Author: rkern Date: 2007-05-21 19:09:59 -0500 (Mon, 21 May 2007) New Revision: 3025 Modified: trunk/Lib/fftpack/setup.py Log: The src/zfft_*.c files need to be added to the dependency list; otherwise, they will not be picked up in an sdist. Modified: trunk/Lib/fftpack/setup.py =================================================================== --- trunk/Lib/fftpack/setup.py 2007-05-21 18:35:53 UTC (rev 3024) +++ trunk/Lib/fftpack/setup.py 2007-05-22 00:09:59 UTC (rev 3025) @@ -27,16 +27,18 @@ 'src/zfftnd.c'] config.add_extension('_fftpack', - sources=sources, - libraries=['dfftpack'], - extra_info = [fft_opt_info, djbfft_info], - ) + sources=sources, + libraries=['dfftpack'], + extra_info=[fft_opt_info, djbfft_info], + depends=['src/zfft_djbfft.c', 'src/zfft_fftpack.c', 'src/zfft_fftw.c', + 'src/zfft_fftw3.c', 'src/zfft_mkl.c'], + ) config.add_extension('convolve', - sources = ['convolve.pyf','src/convolve.c'], - libraries = ['dfftpack'], - extra_info = [fft_opt_info, djbfft_info], - ) + sources=['convolve.pyf','src/convolve.c'], + libraries=['dfftpack'], + extra_info=[fft_opt_info, djbfft_info], + ) return config if __name__ == '__main__': From scipy-svn at scipy.org Mon May 21 21:49:53 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 21 May 2007 20:49:53 -0500 (CDT) Subject: [Scipy-svn] r3026 - in trunk/Lib: interpolate io Message-ID: <20070522014953.08C2C39C064@new.scipy.org> Author: oliphant Date: 2007-05-21 20:49:39 -0500 (Mon, 21 May 2007) New Revision: 3026 Modified: trunk/Lib/interpolate/interpolate.py trunk/Lib/io/netcdf.py Log: Fix up some spline work. Add place-holder for netcdf write capability. 
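The r3026 diff below corrects the keyword name in spline() (kwds -> kind) and strips the trailing order entry from the splmake tuple before evaluation; a short usage sketch, assuming splmake/spleval/spline are importable from Lib/interpolate/interpolate.py:

    import numpy as np
    from scipy.interpolate import splmake, spleval, spline   # import location is an assumption

    xk = np.linspace(0, 2*np.pi, 10)
    yk = np.sin(xk)
    xnew = np.linspace(0, 2*np.pi, 50)

    # splmake returns the 4-tuple (mk, xk, yk, order); spleval consumes it whole
    spl = splmake(xk, yk, order=3, kind='not-a-knot')
    ynew = spleval(spl, xnew)

    # spline() is the one-call convenience wrapper whose signature is fixed below
    ynew2 = spline(xk, yk, xnew, order=3, kind='not-a-knot')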
Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-22 00:09:59 UTC (rev 3025) +++ trunk/Lib/interpolate/interpolate.py 2007-05-22 01:49:39 UTC (rev 3026) @@ -553,7 +553,7 @@ return ppform([c3,c2,c1,c0], xk) def splmake(xk,yk,order=3,kind='not-a-knot',conds=None): - """Return an (mk,xk,yk) representation of a spline given + """Return an (mk,xk,yk,order) representation of a spline given data-points yk can be an N-d array to represent more than one curve, through @@ -603,9 +603,9 @@ def spltopp(mk,xk,yk,order=3): return eval('_sp%dtopp'%order)(mk,xk,yk) -def spline(xk,yk,xnew,order=3,kwds='not-a-knot',conds=None): +def spline(xk,yk,xnew,order=3,kind='not-a-knot',conds=None): func = eval('_sp%deval'%order) - return func(splmake(xk,yk,order=order,kind=kind,conds=conds),xnew) + return func(splmake(xk,yk,order=order,kind=kind,conds=conds)[:-1],xnew) def _sp2topp(zk,xk,yk): dk = xk[1:]-xk[:-1] @@ -626,7 +626,7 @@ res *= d res += zk0 res *= d - res += wk[indxs] + res += yk[indxs] return res def _sp4topp(mk,xk,yk): Modified: trunk/Lib/io/netcdf.py =================================================================== --- trunk/Lib/io/netcdf.py 2007-05-22 00:09:59 UTC (rev 3025) +++ trunk/Lib/io/netcdf.py 2007-05-22 01:49:39 UTC (rev 3026) @@ -37,10 +37,29 @@ class netcdf_file(object): """A NetCDF file parser.""" - def __init__(self, file): - self._buffer = open(file, 'rb') - self._parse() + def __init__(self, file, mode): + mode += 'b' + self._buffer = open(file, mode) + if mode in ['rb', 'r+b']: + self._parse() + elif mode == 'ab': + raise NotImplementedError + def flush(self): + pass + + def sync(self): + pass + + def close(self): + pass + + def create_dimension(self, name, length): + pass + + def create_variable(self, name, type, dimensions): + pass + def read(self, size=-1): """Alias for reading the file buffer.""" return self._buffer.read(size) @@ -225,6 +244,11 @@ if isrec: # Record variables are not stored contiguosly on disk, so we # need to create a separate array for each record. + # + # TEO: This will copy data from the newly-created array + # into the __array_data__ region, thus removing any benefit of using + # a memory-mapped file. You might as well just read the data + # in directly. self.__array_data__ = zeros(shape, dtype) bytes += (shape[0] - 1) * recsize for n in range(shape[0]): @@ -250,6 +274,10 @@ """For scalars.""" return self.__array_data__.item() + def assignValue(self, value): + """For scalars.""" + self.__array_data__.itemset(value) + def typecode(self): return ['b', 'c', 'h', 'i', 'f', 'd'][self._nc_type-1] From scipy-svn at scipy.org Tue May 22 03:15:39 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 02:15:39 -0500 (CDT) Subject: [Scipy-svn] r3027 - trunk/Lib/interpolate Message-ID: <20070522071539.64CA039C033@new.scipy.org> Author: oliphant Date: 2007-05-22 02:15:29 -0500 (Tue, 22 May 2007) New Revision: 3027 Modified: trunk/Lib/interpolate/interpolate.py Log: Add smoothest option for interpolation --- finds interpolant that minimizes the discontinuity. 
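The _find_smoothest* routines added in this and the next revision follow the same null-space recipe: write the under-determined spline conditions as B m = b, take a particular solution through a pseudo-inverse, and choose the null-space component that minimizes the quadratic jump measure m^T A m. A compact restatement of that step (nullspace_smoothest is a hypothetical helper, not part of the patch):

    import numpy as np

    def nullspace_smoothest(Bpinv, V, A, b):
        # Solve  min m^T A m  subject to  B m = b,
        # with Bpinv a pseudo-inverse of B and the columns of V spanning
        # the null space of B, by writing m = m0 + V t.
        m0 = np.dot(Bpinv, b)                          # particular solution
        G = np.dot(V.T, np.dot(A, V))                  # reduced Hessian V^T A V
        t = -np.linalg.solve(G, np.dot(V.T, np.dot(A, m0)))
        return m0 + np.dot(V, t)

In the diffs that follow, A = J^T J with J collecting the jumps of the order-th derivative across the interior breakpoints, so the selected interpolant is the one whose highest-order derivative is least discontinuous, as the log message states.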
Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-22 01:49:39 UTC (rev 3026) +++ trunk/Lib/interpolate/interpolate.py 2007-05-22 07:15:29 UTC (rev 3027) @@ -9,6 +9,7 @@ logical_or, atleast_1d, atleast_2d, meshgrid, ravel import numpy as np import scipy.linalg as slin +import math import fitpack @@ -352,6 +353,57 @@ res = array([np.dot(V[k,:],pp[:,indxs[k]]) for k in xrange(len(xnew))]) return res +def _setdiag(a, k, v): + assert (a.ndim==2) + M,N = a.shape + if k > 0: + start = k + num = N-k + else: + num = M+k + start = abs(k)*N + end = start + num*(N+1)-1 + a.flat[start:end:(N+1)] = v + +# Return the spline that minimizes the dis-continuity of the +# "order-th" derivative; for order >= 2. + +def _find_smoothest2(xk, yk): + N = len(xk)-1 + Np1 = N+1 + # find pseudo-inverse of B directly. + Bd = np.empty((Np1,N)) + for k in range(-N,N): + if (k<0): + l = np.arange(-k,Np1) + v = (l+k+1) + if ((k+1) % 2): + v = -v + else: + l = np.arange(k,N) + v = N-l + if ((k % 2)): + v = -v + _setdiag(Bd,k,v) + Bd /= (Np1) + V2 = np.ones((Np1,)) + V2[1::2] = -1 + V2 /= math.sqrt(Np1) + dk = np.diff(xk) + b = 2*np.diff(yk)/dk + J = np.zeros((N-1,N+1)) + idk = 1.0/dk + _setdiag(J,0,idk[:-1]) + _setdiag(J,1,-idk[1:]-idk[:-1]) + _setdiag(J,2,idk[1:]) + A = np.dot(J.T,J) + val = np.dot(V2,np.dot(A,V2)) + res1 = np.dot(np.outer(V2,V2)/val,A) + mk = np.dot(np.eye(Np1)-res1,np.dot(Bd,b)) + return mk + + + def _get_spline2_Bb(xk, yk, kind, conds): Np1 = len(xk) dk = xk[1:]-xk[:-1] @@ -552,6 +604,7 @@ c0 += (yk[:-1]*xk[1:] - yk[1:]*xk[:-1])/dk return ppform([c3,c2,c1,c0], xk) + def splmake(xk,yk,order=3,kind='not-a-knot',conds=None): """Return an (mk,xk,yk,order) representation of a spline given data-points @@ -562,7 +615,7 @@ kind can be 'natural', 'second', 'first', 'clamped', 'endslope', 'periodic', 'symmetric', 'parabolic', 'not-a-knot', - 'runout' + 'runout', 'smoothest' for 'second', and 'first' conditions can be given which should be the desired second and first derivatives at @@ -577,11 +630,17 @@ if order < 2: raise ValueError("order cannot be negative") + if kind == 'smoothest': + func = eval('_find_smoothest%d' % order) + mk = func(xk,yk) + return mk, xk, yk, order + try: func = eval('_get_spline%d_Bb'%order) except NameError: raise ValueError("order %d not available" % order) + B,b,exfunc,nlu = func(xk, yk, kind, conds) if nlu is None: @@ -597,13 +656,19 @@ return mk, xk, yk, order def spleval((mk,xk,yk,order),xnew): + """Evaluate a spline represented by a tuple at the new x-values. + """ func = eval('_sp%deval'%order) return func((mk,xk,yk),xnew) def spltopp(mk,xk,yk,order=3): + """Return a piece-wise polynomial object from a spline tuple. + """ return eval('_sp%dtopp'%order)(mk,xk,yk) def spline(xk,yk,xnew,order=3,kind='not-a-knot',conds=None): + """Interpolate a curve (xk,yk) at points xnew using a spline fit. + """ func = eval('_sp%deval'%order) return func(splmake(xk,yk,order=order,kind=kind,conds=conds)[:-1],xnew) From scipy-svn at scipy.org Tue May 22 05:19:19 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 04:19:19 -0500 (CDT) Subject: [Scipy-svn] r3028 - trunk/Lib/interpolate Message-ID: <20070522091919.EA4D139C02A@new.scipy.org> Author: oliphant Date: 2007-05-22 04:19:14 -0500 (Tue, 22 May 2007) New Revision: 3028 Modified: trunk/Lib/interpolate/interpolate.py Log: Add smoothest interpolation. 
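The banded matrices B and J in these routines are filled with the small _setdiag helper added in r3027 above; an equivalent index-based formulation is sketched here, populating the dk[:-1] / 2*(dk[1:]+dk[:-1]) / dk[1:] band that the r3028 diff below builds (setdiag is a hypothetical rewrite, not the patch code):

    import numpy as np

    def setdiag(a, k, v):
        # Write v along the k-th diagonal of the 2-D array a
        # (k > 0 above the main diagonal, k < 0 below).
        M, N = a.shape
        if k >= 0:
            n = min(M, N - k)
            a[np.arange(n), np.arange(n) + k] = v
        else:
            n = min(M + k, N)
            a[np.arange(n) - k, np.arange(n)] = v

    # For N+1 = 5 equally spaced knots (dk = 1), the (N-1) x (N+1) matrix B:
    B = np.zeros((3, 5))
    setdiag(B, 0, [1., 1., 1.])      # dk[:-1]
    setdiag(B, 1, [4., 4., 4.])      # 2*(dk[1:] + dk[:-1])
    setdiag(B, 2, [1., 1., 1.])      # dk[1:]
    # B now holds the familiar 1-4-1 band that ties the unknown derivative
    # values at the knots to the continuity conditions at interior knots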
Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-22 07:15:29 UTC (rev 3027) +++ trunk/Lib/interpolate/interpolate.py 2007-05-22 09:19:14 UTC (rev 3028) @@ -401,8 +401,35 @@ res1 = np.dot(np.outer(V2,V2)/val,A) mk = np.dot(np.eye(Np1)-res1,np.dot(Bd,b)) return mk + +def _find_smoothest3(xk, yk): + N = len(xk)-1 + Np1 = N+1 + Nm1 = N-1 + dk = np.diff(xk) + # find B and then take pseudo-inverse + B = np.zeros((Nm1,Np1)) + _setdiag(B,0,dk[:-1]) + _setdiag(B,1,2*(dk[1:]+dk[:-1])) + _setdiag(B,2,dk[1:]) + u,s,vh = np.dual.svd(B) + V2 = vh[-2:,:].T + Bd = np.dot(vh[:-2,:].T, np.dot(diag(1.0/s),u.T)) + b0 = np.diff(yk)/dk + b = 6*np.diff(b0) + J = np.zeros((N-1,N+1)) + idk = 1.0/dk + _setdiag(J,0,idk[:-1]) + _setdiag(J,1,-idk[1:]-idk[:-1]) + _setdiag(J,2,idk[1:]) + A = np.dot(J.T,J) + sub = np.dot(V2.T,np.dot(A,V2)) + subi = np.linalg.inv(sub) + res0 = np.dot(V2,subi) + res1 = np.dot(res0,np.dot(V2.T,A)) + mk = np.dot(np.eye(Np1)-res1,np.dot(Bd,b)) + return mk - def _get_spline2_Bb(xk, yk, kind, conds): Np1 = len(xk) From scipy-svn at scipy.org Tue May 22 05:56:16 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 04:56:16 -0500 (CDT) Subject: [Scipy-svn] r3029 - in trunk/Lib/sandbox: . lobpcg Message-ID: <20070522095616.90BFE39C02A@new.scipy.org> Author: rc Date: 2007-05-22 04:56:10 -0500 (Tue, 22 May 2007) New Revision: 3029 Added: trunk/Lib/sandbox/lobpcg/ trunk/Lib/sandbox/lobpcg/README trunk/Lib/sandbox/lobpcg/X.txt trunk/Lib/sandbox/lobpcg/__init__.py trunk/Lib/sandbox/lobpcg/info.py trunk/Lib/sandbox/lobpcg/lobpcg.py trunk/Lib/sandbox/lobpcg/setup.py Log: initial release Added: trunk/Lib/sandbox/lobpcg/README =================================================================== --- trunk/Lib/sandbox/lobpcg/README 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/README 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,12 @@ +Pure SciPy implementation of Locally Optimal Block Preconditioned Conjugate +Gradient Method (LOBPCG), see +http://www-math.cudenver.edu/~aknyazev/software/BLOPEX/ + +lobpcg.py code was written by Robert Cimrman. Many thanks belong to Andrew +Knyazev, the author of the algorithm, for lots of advice and support. + +The algorithm of LOBPCG is described in detail in: + +A. V. Knyazev, Toward the Optimal Preconditioned Eigensolver: Locally Optimal Block Preconditioned Conjugate Gradient Method. SIAM Journal on Scientific Computing 23 (2001), no. 2, pp. 517-541. http://dx.doi.org/10.1137/S1064827500366124 + +A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov, Block Locally Optimal Preconditioned Eigenvalue Xolvers (BLOPEX) in hypre and PETSc (2007). 
http://arxiv.org/abs/0705.2626 Added: trunk/Lib/sandbox/lobpcg/X.txt =================================================================== --- trunk/Lib/sandbox/lobpcg/X.txt 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/X.txt 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,100 @@ + 9.4578390e-01 8.1748307e-01 5.2405098e-01 + 3.4042417e-01 2.0205362e-01 1.0183770e-01 + 6.0552424e-01 6.2714124e-01 2.8453380e-01 + 6.6734584e-01 7.9436448e-01 1.9751938e-02 + 8.2265186e-01 2.6926755e-01 3.6840847e-01 + 3.1469418e-01 3.0379238e-01 2.0080037e-01 + 2.4413695e-01 1.4521520e-01 7.6558574e-01 + 2.3406188e-02 7.7812449e-01 2.2445593e-01 + 3.7968672e-01 3.5230883e-03 8.7364788e-01 + 3.0544127e-01 5.5824667e-01 1.6885245e-01 + 6.0105945e-01 1.0270848e-01 4.2079194e-01 + 9.7298628e-01 8.9559614e-01 6.9914607e-01 + 6.3173944e-01 3.6783941e-02 8.7291415e-01 + 1.2850335e-02 6.7239405e-01 6.1485979e-02 + 2.2492574e-01 2.2410178e-01 6.9610368e-02 + 7.0026143e-01 3.2974585e-01 4.7714450e-01 + 4.6216202e-01 7.1810038e-01 1.3842199e-01 + 1.6075778e-01 2.8434470e-01 9.3491343e-01 + 5.9920565e-01 7.7861398e-01 5.5881318e-01 + 2.5610599e-01 4.8772724e-01 8.2174995e-01 + 8.4599049e-02 6.7666816e-01 6.7358438e-01 + 1.7276306e-01 3.3672791e-01 7.1731543e-01 + 9.3638799e-01 2.8648356e-01 3.3202492e-01 + 7.8378312e-01 1.8876724e-01 5.2164755e-01 + 1.3125747e-01 7.3192144e-01 1.4458665e-01 + 3.5898101e-01 2.5542739e-01 1.3423188e-01 + 6.6653581e-01 5.3403161e-01 6.1206489e-01 + 1.6424754e-01 8.5812639e-01 8.0589966e-01 + 1.2393097e-01 6.4786459e-01 1.0513908e-01 + 5.7704459e-01 2.7393541e-01 3.5526243e-01 + 9.7196693e-01 2.4268395e-01 7.7594090e-01 + 9.2527201e-01 7.5413139e-01 2.6376718e-01 + 7.1146287e-01 2.6500795e-01 4.7268878e-01 + 8.3386510e-02 1.9721810e-01 2.1390330e-01 + 8.8205593e-01 3.1468590e-01 1.4032796e-01 + 6.1270690e-01 1.7594654e-01 8.0526177e-01 + 8.1338010e-01 3.2610889e-02 6.9759916e-01 + 6.8591045e-01 1.8629491e-01 5.5777308e-01 + 1.9653411e-01 7.4549104e-01 1.0317975e-01 + 5.1337714e-01 6.4618762e-01 4.1435988e-02 + 1.0545021e-01 2.1482454e-01 3.5563984e-02 + 4.9898768e-01 1.9196579e-01 1.7861081e-01 + 5.2622726e-01 7.6245394e-01 2.3911878e-01 + 2.3876524e-01 7.3847955e-01 3.8956799e-01 + 5.2627999e-01 7.3547286e-01 5.5896867e-01 + 1.3614202e-01 1.5962335e-01 5.8394916e-01 + 2.8810635e-01 1.6800977e-01 1.6392705e-01 + 2.5869014e-01 3.8838829e-01 3.2601187e-01 + 7.9839770e-01 9.2825768e-01 1.7308552e-01 + 9.6538231e-01 7.3628476e-01 4.8175362e-01 + 2.7278367e-01 7.1852217e-01 1.9240004e-01 + 4.2377830e-01 6.1651846e-01 6.0603278e-01 + 7.1786841e-01 7.1124239e-01 6.4650000e-01 + 7.7306094e-01 5.9903451e-01 3.7940119e-01 + 5.8775950e-01 1.5754180e-01 3.3226044e-01 + 8.5990052e-02 1.1271291e-01 1.5367891e-01 + 9.7392075e-01 7.7935865e-01 4.0554907e-01 + 4.3989414e-01 6.0471517e-02 1.3175170e-01 + 8.4232543e-01 3.1856703e-01 7.5886305e-01 + 7.0553803e-01 2.0975170e-01 8.4424784e-01 + 2.6808711e-01 6.6512384e-01 7.9348594e-01 + 7.4909146e-01 3.1300856e-01 3.6137227e-01 + 5.3318446e-01 7.4007881e-01 1.3786857e-01 + 7.0134730e-01 9.9235966e-01 9.0322375e-01 + 2.1945692e-01 5.3450916e-01 6.4340685e-02 + 4.4056297e-01 7.2382493e-01 2.0988534e-01 + 2.3244923e-01 9.7651014e-01 4.6969657e-01 + 1.2116127e-02 9.4207183e-01 2.0084696e-01 + 3.4609005e-01 3.2863497e-01 3.5739789e-02 + 3.4162931e-01 4.0051132e-01 7.9403726e-01 + 5.5939743e-01 3.6049040e-01 3.5666314e-01 + 2.3202965e-01 2.4620348e-01 3.2548306e-01 + 2.4248651e-01 3.6670017e-01 4.8815216e-01 + 5.4151465e-01 1.9926619e-01 1.1374385e-01 + 
3.8399210e-01 1.7141648e-01 5.9665625e-01 + 2.1168085e-01 6.3049642e-01 9.7777921e-01 + 4.3105094e-01 2.2652553e-02 6.8658420e-01 + 6.7361827e-02 7.8998130e-01 4.1351694e-02 + 7.2217423e-01 3.9178923e-02 7.9755060e-01 + 4.3727445e-01 9.7835298e-01 2.8601743e-01 + 8.4763941e-01 9.2475650e-01 9.9795252e-01 + 6.2537016e-01 2.8203512e-02 3.7445021e-01 + 4.8265728e-01 4.7061796e-01 2.7402319e-01 + 3.1011489e-01 5.7781953e-01 9.3695011e-01 + 7.8857881e-01 1.5002163e-01 8.0323307e-02 + 9.0247724e-01 8.8659951e-01 3.1252615e-01 + 5.5738324e-01 5.5138175e-02 1.1495353e-02 + 9.9828118e-01 3.9716206e-01 8.6223948e-01 + 3.7224336e-01 7.5698087e-01 4.7226552e-01 + 4.1067449e-01 3.6324903e-01 5.5833599e-01 + 2.0115001e-01 3.0476498e-01 9.6969943e-01 + 1.4328644e-01 5.9892441e-02 1.8394223e-01 + 8.6694382e-01 4.8836797e-01 9.3343161e-01 + 3.7118176e-01 4.0550509e-01 3.7320004e-02 + 2.9918396e-01 2.8109670e-01 2.2387914e-01 + 9.3042764e-01 6.9574924e-02 9.8663726e-01 + 7.6750765e-01 9.6047464e-01 1.5928122e-01 + 1.5166860e-01 5.8205830e-01 7.4238213e-01 + 2.5812146e-01 5.4972680e-01 6.7403608e-01 + 5.1000251e-01 8.3706633e-02 5.1344360e-01 Added: trunk/Lib/sandbox/lobpcg/__init__.py =================================================================== --- trunk/Lib/sandbox/lobpcg/__init__.py 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/__init__.py 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,11 @@ +"LOBPCG" + +from info import __doc__ +import lobpcg +__doc__ = '\n\n'.join( (lobpcg.__doc__, __doc__) ) +del lobpcg + +from lobpcg import * + +from numpy.testing import NumpyTest +test = NumpyTest().test Added: trunk/Lib/sandbox/lobpcg/info.py =================================================================== --- trunk/Lib/sandbox/lobpcg/info.py 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/info.py 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,16 @@ +""" +The algorithm of LOBPCG is described in detail in: + +A. V. Knyazev, Toward the Optimal Preconditioned Eigensolver: Locally Optimal Block Preconditioned Conjugate Gradient Method. SIAM Journal on Scientific Computing 23 (2001), no. 2, pp. 517-541. http://dx.doi.org/10.1137/S1064827500366124 + +A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov, Block Locally Optimal Preconditioned Eigenvalue Xolvers (BLOPEX) in hypre and PETSc (2007). http://arxiv.org/abs/0705.2626 + + +Depends upon symeig (http://mdp-toolkit.sourceforge.net/symeig.html) for the +moment, as the symmetric eigenvalue solvers were not available in scipy. + +Usage: XXXXX + + +""" +postpone_import = 1 Added: trunk/Lib/sandbox/lobpcg/lobpcg.py =================================================================== --- trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,480 @@ +""" +Pure SciPy implementation of Locally Optimal Block Preconditioned Conjugate +Gradient Method (LOBPCG), see +http://www-math.cudenver.edu/~aknyazev/software/BLOPEX/ + +License: BSD + +(c) Robert Cimrman, Andrew Knyazev +""" + +import numpy as nm +import scipy as sc +import scipy.sparse as sp +import scipy.linalg as la +import scipy.io as io +import types +from symeig import symeig + +def pause(): + raw_input() + +def save( ar, fileName ): + io.write_array( fileName, ar, precision = 8 ) + +## +# 21.05.2007, c +def as2d( ar ): + if ar.ndim == 2: + return ar + else: # Assume 1! 
+ aux = nm.array( ar, copy = False ) + aux.shape = (ar.shape[0], 1) + return aux + +## +# 05.04.2007, c +# 10.04.2007 +def makeOperator( operatorInput, expectedShape ): + class Operator( object ): + def __call__( self, vec ): + return self.call( vec ) + + operator = Operator() + operator.obj = operatorInput + + if hasattr( operatorInput, 'shape' ): + operator.shape = operatorInput.shape + operator.dtype = operatorInput.dtype + if operator.shape != expectedShape: + raise ValueError, 'bad operator shape %s != %s' \ + % (expectedShape, operator.shape) + if sp.issparse( operatorInput ): + def call( vec ): + out = operator.obj * vec + if sp.issparse( out ): + out = out.toarray() + return as2d( out ) + else: + def call( vec ): + return as2d( nm.asarray( sc.dot( operator.obj, vec ) ) ) + operator.call = call + + elif isinstance( operatorInput, types.FunctionType ) or \ + isinstance( operatorInput, types.BuiltinFunctionType ): + operator.shape = expectedShape + operator.dtype = nm.float64 + operator.call = operatorInput + + return operator + +## +# 05.04.2007, c +def applyConstraints( blockVectorV, factYBY, blockVectorBY, blockVectorY ): + """Changes blockVectorV in place.""" + gramYBV = sc.dot( blockVectorBY.T, blockVectorV ) + tmp = la.cho_solve( factYBY, gramYBV ) + blockVectorV -= sc.dot( blockVectorY, tmp ) + +## +# 05.04.2007, c +def b_orthonormalize( operatorB, blockVectorV, + blockVectorBV = None, retInvR = False ): + + if blockVectorBV is None: + if operatorB is not None: + blockVectorBV = operatorB( blockVectorV ) + else: + blockVectorBV = blockVectorV # Shared data!!! + gramVBV = sc.dot( blockVectorV.T, blockVectorBV ) + gramVBV = la.cholesky( gramVBV ) + la.inv( gramVBV, overwrite_a = True ) + # gramVBV is now R^{-1}. + blockVectorV = sc.dot( blockVectorV, gramVBV ) + if operatorB is not None: + blockVectorBV = sc.dot( blockVectorBV, gramVBV ) + + if retInvR: + return blockVectorV, blockVectorBV, gramVBV + else: + return blockVectorV, blockVectorBV + +## +# 04.04.2007, c +# 05.04.2007 +# 06.04.2007 +# 10.04.2007 +def lobpcg( blockVectorX, operatorA, + operatorB = None, operatorT = None, blockVectorY = None, + residualTolerance = None, maxIterations = 20, + largest = True, verbosityLevel = 0, + retLambdaHistory = False, retResidualNormsHistory = False ): + + exitFlag = 0 + + if blockVectorY is not None: + sizeY = blockVectorY.shape[1] + else: + sizeY = 0 + + # Block size. 
+ n, sizeX = blockVectorX.shape + if sizeX > n: + raise ValueError,\ + 'the first input argument blockVectorX must be tall, not fat' +\ + ' (%d, %d)' % blockVectorX.shape + + if n < 1: + raise ValueError,\ + 'the matrix size is wrong (%d)' % n + + operatorA = makeOperator( operatorA, (n, n) ) + + if operatorB is not None: + operatorB = makeOperator( operatorB, (n, n) ) + + if operatorT is not None: + operatorT = makeOperator( operatorT, (n, n) ) +## if n != operatorA.shape[0]: +## aux = 'The size (%d, %d) of operatorA is not the same as\n'+\ +## '%d - the number of rows of blockVectorX' % operatorA.shape + (n,) +## raise ValueError, aux + +## if operatorA.shape[0] != operatorA.shape[1]: +## raise ValueError, 'operatorA must be a square matrix (%d, %d)' %\ +## operatorA.shape + + if residualTolerance is None: + residualTolerance = sqrt( 1e-15 ) * n + + maxIterations = min( n, maxIterations ) + + if verbosityLevel: + aux = "Solving " + if operatorB is None: + aux += "standard" + else: + aux += "generalized" + aux += " eigenvalue problem with" + if operatorT is None: + aux += "out" + aux += " preconditioning\n\n" + aux += "matrix size %d\n" % n + aux += "block size %d\n\n" % sizeX + if blockVectorY is None: + aux += "No constraints\n\n" + else: + if sizeY > 1: + aux += "%d constraints\n\n" % sizeY + else: + aux += "%d constraint\n\n" % sizeY + print aux + + ## + # Apply constraints to X. + if blockVectorY is not None: + + if operatorB is not None: + blockVectorBY = operatorB( blockVectorY ) + else: + blockVectorBY = blockVectorY + + # gramYBY is a dense array. + gramYBY = sc.dot( blockVectorY.T, blockVectorBY ) + try: + # gramYBY is a Cholesky factor from now on... + gramYBY = la.cho_factor( gramYBY ) + except: + print 'cannot handle linear dependent constraints' + raise + + applyConstraints( blockVectorX, gramYBY, blockVectorBY, blockVectorY ) + + ## + # B-orthonormalize X. + blockVectorX, blockVectorBX = b_orthonormalize( operatorB, blockVectorX ) + + ## + # Compute the initial Ritz vectors: solve the eigenproblem. + blockVectorAX = operatorA( blockVectorX ) + gramXAX = sc.dot( blockVectorX.T, blockVectorAX ) + # gramXBX is X^T * X. + gramXBX = sc.dot( blockVectorX.T, blockVectorX ) + _lambda, eigBlockVector = symeig( gramXAX ) + ii = nm.argsort( _lambda )[:sizeX] + if largest: + ii = ii[::-1] + _lambda = _lambda[ii] + eigBlockVector = nm.asarray( eigBlockVector[:,ii] ) +# pause() + blockVectorX = sc.dot( blockVectorX, eigBlockVector ) + blockVectorAX = sc.dot( blockVectorAX, eigBlockVector ) + if operatorB is not None: + blockVectorBX = sc.dot( blockVectorBX, eigBlockVector ) + + ## + # Active index set. + activeMask = nm.ones( (sizeX,), dtype = nm.bool ) + + lambdaHistory = [_lambda] + residualNormsHistory = [] + + previousBlockSize = sizeX + ident = nm.eye( sizeX, dtype = operatorA.dtype ) + ident0 = nm.eye( sizeX, dtype = operatorA.dtype ) + + ## + # Main iteration loop. 
+ for iterationNumber in xrange( maxIterations ): + if verbosityLevel > 0: + print 'iteration %d' % iterationNumber + + aux = blockVectorBX * _lambda[nm.newaxis,:] + blockVectorR = blockVectorAX - aux + + aux = nm.sum( blockVectorR.conjugate() * blockVectorR, 0 ) + residualNorms = nm.sqrt( aux ) + + +## if iterationNumber == 2: +## print blockVectorAX +## print blockVectorBX +## print blockVectorR +## pause() + + residualNormsHistory.append( residualNorms ) + + ii = nm.where( residualNorms > residualTolerance, True, False ) + activeMask = activeMask & ii + if verbosityLevel > 2: + print activeMask + + currentBlockSize = activeMask.sum() + if currentBlockSize != previousBlockSize: + previousBlockSize = currentBlockSize + ident = nm.eye( currentBlockSize, dtype = operatorA.dtype ) + + + if currentBlockSize == 0: + failureFlag = 0 # All eigenpairs converged. + break + + if verbosityLevel > 0: + print 'current block size:', currentBlockSize + print 'eigenvalue:', _lambda + print 'residual norms:', residualNorms + if verbosityLevel > 10: + print eigBlockVector + + activeBlockVectorR = as2d( blockVectorR[:,activeMask] ) + + if iterationNumber > 0: + activeBlockVectorP = as2d( blockVectorP[:,activeMask] ) + activeBlockVectorAP = as2d( blockVectorAP[:,activeMask] ) + activeBlockVectorBP = as2d( blockVectorBP[:,activeMask] ) + +# print activeBlockVectorR + if operatorT is not None: + ## + # Apply preconditioner T to the active residuals. + activeBlockVectorR = operatorT( activeBlockVectorR ) + +# assert nm.all( blockVectorR == activeBlockVectorR ) + + ## + # Apply constraints to the preconditioned residuals. + applyConstraints( activeBlockVectorR, + gramYBY, blockVectorBY, blockVectorY ) + +# assert nm.all( blockVectorR == activeBlockVectorR ) + + ## + # B-orthonormalize the preconditioned residuals. 
+# print activeBlockVectorR + + aux = b_orthonormalize( operatorB, activeBlockVectorR ) + activeBlockVectorR, activeBlockVectorBR = aux +# print activeBlockVectorR + + activeBlockVectorAR = operatorA( activeBlockVectorR ) + + if iterationNumber > 0: + aux = b_orthonormalize( operatorB, activeBlockVectorP, + activeBlockVectorBP, retInvR = True ) + activeBlockVectorP, activeBlockVectorBP, invR = aux + activeBlockVectorAP = sc.dot( activeBlockVectorAP, invR ) + + ## + # Perform the Rayleigh Ritz Procedure: + # Compute symmetric Gram matrices: + + xaw = sc.dot( blockVectorX.T, activeBlockVectorAR ) + waw = sc.dot( activeBlockVectorR.T, activeBlockVectorAR ) + xbw = sc.dot( blockVectorX.T, activeBlockVectorBR ) + + if iterationNumber > 0: + xap = sc.dot( blockVectorX.T, activeBlockVectorAP ) + wap = sc.dot( activeBlockVectorR.T, activeBlockVectorAP ) + pap = sc.dot( activeBlockVectorP.T, activeBlockVectorAP ) + xbp = sc.dot( blockVectorX.T, activeBlockVectorBP ) + wbp = sc.dot( activeBlockVectorR.T, activeBlockVectorBP ) + + gramA = nm.bmat( [[nm.diag( _lambda ), xaw, xap], + [xaw.T, waw, wap], + [xap.T, wap.T, pap]] ) + try: + gramB = nm.bmat( [[ident0, xbw, xbp], + [xbw.T, ident, wbp], + [xbp.T, wbp.T, ident]] ) + except: + print ident + print xbw + raise + else: + gramA = nm.bmat( [[nm.diag( _lambda ), xaw], + [xaw.T, waw]] ) + gramB = nm.bmat( [[ident0, xbw], + [xbw.T, ident0]] ) + try: + assert nm.allclose( gramA.T, gramA ) + except: + print gramA.T - gramA + raise + + try: + assert nm.allclose( gramB.T, gramB ) + except: + print gramB.T - gramB + raise + +## print nm.diag( _lambda ) +## print xaw +## print waw +## print xbw +## try: +## print xap +## print wap +## print pap +## print xbp +## print wbp +## except: +## pass +## pause() + + if verbosityLevel > 10: + save( gramA, 'gramA' ) + save( gramB, 'gramB' ) + ## + # Solve the generalized eigenvalue problem. +# _lambda, eigBlockVector = la.eig( gramA, gramB ) + _lambda, eigBlockVector = symeig( gramA, gramB ) + ii = nm.argsort( _lambda )[:sizeX] + if largest: + ii = ii[::-1] + if verbosityLevel > 10: + print ii + + _lambda = _lambda[ii].astype( nm.float64 ) + eigBlockVector = nm.asarray( eigBlockVector[:,ii].astype( nm.float64 ) ) + if verbosityLevel > 10: + print 'lambda:', _lambda +## # Normalize eigenvectors! +## aux = nm.sum( eigBlockVector.conjugate() * eigBlockVector, 0 ) +## eigVecNorms = nm.sqrt( aux ) +## eigBlockVector = eigBlockVector / eigVecNorms[nm.newaxis,:] +# eigBlockVector, aux = b_orthonormalize( operatorB, eigBlockVector ) + + if verbosityLevel > 10: + print eigBlockVector + pause() + ## + # Compute Ritz vectors. 
+ if iterationNumber > 0: + eigBlockVectorX = eigBlockVector[:sizeX] + eigBlockVectorR = eigBlockVector[sizeX:sizeX+currentBlockSize] + eigBlockVectorP = eigBlockVector[sizeX+currentBlockSize:] + + pp = sc.dot( activeBlockVectorR, eigBlockVectorR )\ + + sc.dot( activeBlockVectorP, eigBlockVectorP ) + + app = sc.dot( activeBlockVectorAR, eigBlockVectorR )\ + + sc.dot( activeBlockVectorAP, eigBlockVectorP ) + + bpp = sc.dot( activeBlockVectorBR, eigBlockVectorR )\ + + sc.dot( activeBlockVectorBP, eigBlockVectorP ) + else: + eigBlockVectorX = eigBlockVector[:sizeX] + eigBlockVectorR = eigBlockVector[sizeX:] + + pp = sc.dot( activeBlockVectorR, eigBlockVectorR ) + + app = sc.dot( activeBlockVectorAR, eigBlockVectorR ) + + bpp = sc.dot( activeBlockVectorBR, eigBlockVectorR ) + + if verbosityLevel > 10: + print pp + print app + print bpp + pause() +# print pp.shape, app.shape, bpp.shape + + blockVectorX = sc.dot( blockVectorX, eigBlockVectorX ) + pp + blockVectorAX = sc.dot( blockVectorAX, eigBlockVectorX ) + app + blockVectorBX = sc.dot( blockVectorBX, eigBlockVectorX ) + bpp + + blockVectorP, blockVectorAP, blockVectorBP = pp, app, bpp + + aux = blockVectorBX * _lambda[nm.newaxis,:] + blockVectorR = blockVectorAX - aux + + aux = nm.sum( blockVectorR.conjugate() * blockVectorR, 0 ) + residualNorms = nm.sqrt( aux ) + + + if verbosityLevel > 0: + print 'final eigenvalue:', _lambda + print 'final residual norms:', residualNorms + + + return _lambda, eigBlockVectorX + +########################################################################### +if __name__ == '__main__': + from scipy.sparse import spdiags, speye + import time + +## def operatorB( vec ): +## return vec + + n = 100 + vals = [nm.arange( n, dtype = nm.float64 ) + 1] + operatorA = spdiags( vals, 0, n, n ) + operatorB = speye( n, n ) +# operatorB[0,0] = 0 + operatorB = nm.eye( n, n ) + Y = nm.eye( n, 3 ) + + +## X = sc.rand( n, 3 ) + xfile = {100 : 'X.txt', 1000 : 'X2.txt', 10000 : 'X3.txt'} + X = nm.fromfile( xfile[n], dtype = nm.float64, sep = ' ' ) + X.shape = (n, 3) + + ivals = [1./vals[0]] + def precond( x ): + invA = spdiags( ivals, 0, n, n ) + y = invA * x + if sp.issparse( y ): + y = y.toarray() + + return as2d( y ) + + tt = time.clock() + eigs, vecs = lobpcg( X, operatorA, operatorB, blockVectorY = Y, + operatorT = precond, + residualTolerance = 1e-4, maxIterations = 40, + largest = False, verbosityLevel = 1 ) + print 'solution time:', time.clock() - tt + print eigs + Added: trunk/Lib/sandbox/lobpcg/setup.py =================================================================== --- trunk/Lib/sandbox/lobpcg/setup.py 2007-05-22 09:19:14 UTC (rev 3028) +++ trunk/Lib/sandbox/lobpcg/setup.py 2007-05-22 09:56:10 UTC (rev 3029) @@ -0,0 +1,16 @@ +#!/usr/bin/env python +from os.path import join +import sys + +def configuration(parent_package='',top_path=None): + from numpy.distutils.misc_util import Configuration + from numpy.distutils.system_info import get_info + + config = Configuration('lobpcg',parent_package,top_path) +# config.add_data_dir('tests') + + return config + +if __name__ == '__main__': + from numpy.distutils.core import setup + setup(**configuration(top_path='').todict()) Property changes on: trunk/Lib/sandbox/lobpcg/setup.py ___________________________________________________________________ Name: svn:executable + * From scipy-svn at scipy.org Tue May 22 10:40:55 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 09:40:55 -0500 (CDT) Subject: [Scipy-svn] r3030 - trunk/Lib/sandbox/lobpcg 
Message-ID: <20070522144055.BC82A39C15D@new.scipy.org> Author: rc Date: 2007-05-22 09:40:52 -0500 (Tue, 22 May 2007) New Revision: 3030 Modified: trunk/Lib/sandbox/lobpcg/lobpcg.py Log: small fixes Modified: trunk/Lib/sandbox/lobpcg/lobpcg.py =================================================================== --- trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-22 09:56:10 UTC (rev 3029) +++ trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-22 14:40:52 UTC (rev 3030) @@ -145,7 +145,7 @@ ## operatorA.shape if residualTolerance is None: - residualTolerance = sqrt( 1e-15 ) * n + residualTolerance = nm.sqrt( 1e-15 ) * n maxIterations = min( n, maxIterations ) @@ -283,8 +283,9 @@ ## # Apply constraints to the preconditioned residuals. - applyConstraints( activeBlockVectorR, - gramYBY, blockVectorBY, blockVectorY ) + if blockVectorY is not None: + applyConstraints( activeBlockVectorR, + gramYBY, blockVectorBY, blockVectorY ) # assert nm.all( blockVectorR == activeBlockVectorR ) From scipy-svn at scipy.org Tue May 22 14:15:48 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 13:15:48 -0500 (CDT) Subject: [Scipy-svn] r3031 - trunk/Lib/sandbox/timeseries Message-ID: <20070522181548.CB12239C16F@new.scipy.org> Author: mattknox_ca Date: 2007-05-22 13:15:43 -0500 (Tue, 22 May 2007) New Revision: 3031 Modified: trunk/Lib/sandbox/timeseries/tdates.py Log: optimized performance of dateinfo functions Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-22 14:40:52 UTC (rev 3030) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-22 18:15:43 UTC (rev 3031) @@ -336,7 +336,8 @@ def __getdateinfo__(self, info): return numeric.asarray(cseries.DA_getDateInfo(numeric.asarray(self), - self.freq, info), + self.freq, info, + int(self.isfull())), dtype=int_) __getDateInfo = __getdateinfo__ #.... Conversion methods .................... 
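The r3031 change above passes int(self.isfull()) through to cseries.DA_getDateInfo; the C half of the optimization follows in r3032 below, where per-frequency skip counts let the loop reuse the previously computed value whenever the requested field (year, quarter, month, ...) cannot yet have changed for a full, gapless DateArray. The following is a rough pure-Python sketch of that caching idea only; the helper names (cached_dateinfo, get_info) and the demo at the bottom are illustrative and are not part of the timeseries API.

    import datetime

    def cached_dateinfo(ordinals, get_info, skip, is_full):
        """Return [get_info(o) for o in ordinals], skipping recomputation when safe.

        `skip` is a lower bound on how many consecutive periods share the same
        derived value (e.g. 365 for the year at daily frequency).  Until the
        first change is observed the phase is unknown, so every element is
        recomputed; afterwards the previous value is reused until `skip`
        periods have elapsed since the last observed change.
        """
        result = []
        prev = None
        seen_change = False
        counter = 1
        for o in ordinals:
            if (not is_full) or (not seen_change) or (prev is None) or (counter >= skip):
                info = get_info(o)                 # the expensive per-date conversion
                if prev is not None and info != prev:
                    seen_change = True             # phase of the changes is now known
                    counter = 0
                prev = info
            result.append(prev)
            counter += 1
        return result

    # Demo: the year of ~800 consecutive days, with far fewer conversions than days.
    start = datetime.date(2000, 1, 1).toordinal()
    years = cached_dateinfo(range(start, start + 800),
                            lambda o: datetime.date.fromordinal(o).year,
                            skip=365, is_full=True)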
From scipy-svn at scipy.org Tue May 22 14:19:37 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 13:19:37 -0500 (CDT) Subject: [Scipy-svn] r3032 - trunk/Lib/sandbox/timeseries/src Message-ID: <20070522181937.D80D939C036@new.scipy.org> Author: mattknox_ca Date: 2007-05-22 13:19:32 -0500 (Tue, 22 May 2007) New Revision: 3032 Modified: trunk/Lib/sandbox/timeseries/src/c_tdates.c Log: optimized performance of dateinfo functions Modified: trunk/Lib/sandbox/timeseries/src/c_tdates.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-22 18:15:43 UTC (rev 3031) +++ trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-22 18:19:32 UTC (rev 3032) @@ -2429,19 +2429,169 @@ } +/************************************************************** +** The following functions are used by DateArray_getDateInfo ** +** to determine how many consecutive periods will have the ** +** same result ** +**************************************************************/ + +// also used for qyear +static int __skip_periods_year(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_QTR: + return 4; + case FR_MTH: + return 12; + case FR_WK: + return 51; + case FR_BUS: + return 260; + case FR_DAY: + return 365; + case FR_HR: + return 365*24; + case FR_MIN: + return 365*24*60; + case FR_SEC: + return 365*24*60*60; + default: + return 1; + } +} + +static int __skip_periods_quarter(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_MTH: + return 3; + case FR_WK: + return 12; + case FR_BUS: + return 64; + case FR_DAY: + return 90; + case FR_HR: + return 90*24; + case FR_MIN: + return 90*24*60; + case FR_SEC: + return 90*24*60*60; + default: + return 1; + } +} + +static int __skip_periods_month(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_WK: + return 3; + case FR_BUS: + return 20; + case FR_DAY: + return 28; + case FR_HR: + return 28*24; + case FR_MIN: + return 28*24*60; + case FR_SEC: + return 28*24*60*60; + default: + return 1; + } +} + +// also used for day_of_year, day_of_week +static int __skip_periods_day(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_HR: + return 24; + case FR_MIN: + return 24*60; + case FR_SEC: + return 24*60*60; + default: + return 1; + } +} + +static int __skip_periods_week(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_BUS: + return 5; + case FR_DAY: + return 7; + case FR_HR: + return 7*28*24; + case FR_MIN: + return 7*28*24*60; + case FR_SEC: + return 7*28*24*60*60; + default: + return 1; + } +} + +static int __skip_periods_hour(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_MIN: + return 60; + case FR_SEC: + return 60*60; + default: + return 1; + } +} + +static int __skip_periods_minute(int freq) { + + int freq_group = get_freq_group(freq); + + switch(freq_group) + { + case FR_SEC: + return 60; + default: + return 1; + } +} + PyObject * DateArray_getDateInfo(PyObject *self, PyObject *args) { - int freq; + int freq, is_full, skip_periods, counter=1, val_changed=0; char *info; - PyArrayObject *array; - PyArrayObject *newArray; + PyObject *prev_val=NULL; + PyArrayObject *array, *newArray; PyArrayIterObject *iterSource, *iterResult; PyObject* (*getDateInfo)(DateObject*, void*) = NULL; - if (!PyArg_ParseTuple(args, "Ois:getDateInfo(array, freq, 
info)", &array, &freq, &info)) return NULL; + if (!PyArg_ParseTuple(args, "Oisi:getDateInfo(array, freq, info, is_full)", + &array, &freq, &info, &is_full)) return NULL; newArray = (PyArrayObject *)PyArray_Copy(array); iterSource = (PyArrayIterObject *)PyArray_IterNew((PyObject *)array); @@ -2452,59 +2602,95 @@ { case 'Y': //year getDateInfo = &DateObject_year; + skip_periods = __skip_periods_year(freq); break; case 'F': //"fiscal" year getDateInfo = &DateObject_qyear; + skip_periods = __skip_periods_year(freq); break; case 'Q': //quarter getDateInfo = &DateObject_quarter; + skip_periods = __skip_periods_quarter(freq); break; case 'M': //month getDateInfo = &DateObject_month; + skip_periods = __skip_periods_month(freq); break; case 'D': //day getDateInfo = &DateObject_day; + skip_periods = __skip_periods_day(freq); break; case 'R': //day of year getDateInfo = &DateObject_day_of_year; + skip_periods = __skip_periods_day(freq); break; case 'W': //day of week getDateInfo = &DateObject_day_of_week; + skip_periods = __skip_periods_day(freq); break; case 'I': //week of year getDateInfo = &DateObject_week; + skip_periods = __skip_periods_week(freq); break; case 'H': //hour getDateInfo = &DateObject_hour; + skip_periods = __skip_periods_hour(freq); break; case 'T': //minute getDateInfo = &DateObject_minute; + skip_periods = __skip_periods_minute(freq); break; case 'S': //second getDateInfo = &DateObject_second; + skip_periods = 1; break; default: return NULL; } - while (iterSource->index < iterSource->size) { + { DateObject *curr_date; PyObject *val, *dInfo; - val = PyArray_GETITEM(array, iterSource->dataptr); - curr_date = DateObject_FromFreqAndValue(freq, PyInt_AsLong(val)); - dInfo = getDateInfo(curr_date, NULL); + while (iterSource->index < iterSource->size) { - PyArray_SETITEM(newArray, iterResult->dataptr, dInfo); + if ((val_changed == 0) || + (is_full == 0) || + (prev_val == NULL) || + (counter >= skip_periods)) { - Py_DECREF(val); - Py_DECREF(curr_date); - Py_DECREF(dInfo); + val = PyArray_GETITEM(array, iterSource->dataptr); + curr_date = DateObject_FromFreqAndValue(freq, PyInt_AsLong(val)); + dInfo = getDateInfo(curr_date, NULL); - PyArray_ITER_NEXT(iterSource); - PyArray_ITER_NEXT(iterResult); + if ((prev_val != NULL) && + (PyInt_AsLong(prev_val) != PyInt_AsLong(dInfo))) { + val_changed = 1; + counter = 0; + } + + Py_DECREF(val); + Py_DECREF(curr_date); + + if (prev_val != NULL) { + Py_DECREF(prev_val); + } + + prev_val = dInfo; + } + + PyArray_SETITEM(newArray, iterResult->dataptr, dInfo); + + PyArray_ITER_NEXT(iterSource); + PyArray_ITER_NEXT(iterResult); + + counter += 1; + } } + if (prev_val != NULL) { + Py_DECREF(prev_val); + } Py_DECREF(iterSource); Py_DECREF(iterResult); From scipy-svn at scipy.org Tue May 22 16:19:56 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 22 May 2007 15:19:56 -0500 (CDT) Subject: [Scipy-svn] r3033 - trunk/Lib/sandbox/timeseries Message-ID: <20070522201956.2DD0A39C016@new.scipy.org> Author: mattknox_ca Date: 2007-05-22 15:19:52 -0500 (Tue, 22 May 2007) New Revision: 3033 Modified: trunk/Lib/sandbox/timeseries/tseries.py Log: operations on incompatible TimeSeries objects now returns a plain MaskedArray instead of raising an error. Compatible TimeSeries still return a TimeSeries object. 
Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-22 18:19:32 UTC (rev 3032) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-22 20:19:52 UTC (rev 3033) @@ -122,24 +122,36 @@ TimeSeriesError.__init__(self, msg) #def _compatibilitycheck(a, b): -def _timeseriescompat(a, b): +def _timeseriescompat(a, b, raise_error=True): """Checks the date compatibility of two TimeSeries object. Returns True if everything's fine, or raises an exception.""" if not (hasattr(a,'freq') and hasattr(b, 'freq')): return True if a.freq != b.freq: - raise TimeSeriesCompatibilityError('freq', a.freq, b.freq) + if raise_error: + raise TimeSeriesCompatibilityError('freq', a.freq, b.freq) + else: + return False elif a.start_date != b.start_date: - raise TimeSeriesCompatibilityError('start_date', - a.start_date, b.start_date) + if raise_error: + raise TimeSeriesCompatibilityError('start_date', + a.start_date, b.start_date) + else: + return False else: step_diff = a._dates.get_steps() != b._dates.get_steps() if (step_diff is True) or (hasattr(step_diff, "any") and step_diff.any()): - raise TimeSeriesCompatibilityError('time_steps', - a._dates.get_steps(), b._dates.get_steps()) + if raise_error: + raise TimeSeriesCompatibilityError('time_steps', + a._dates.get_steps(), b._dates.get_steps()) + else: + return False elif a.shape != b.shape: - raise TimeSeriesCompatibilityError('size', "1: %s" % str(a.shape), - "2: %s" % str(b.shape)) + if raise_error: + raise TimeSeriesCompatibilityError('size', "1: %s" % str(a.shape), + "2: %s" % str(b.shape)) + else: + return False return True @@ -234,10 +246,16 @@ "Execute the call behavior." instance = self.obj if isinstance(other, TimeSeries): - assert(_timeseriescompat(instance, other)) + compat = _timeseriescompat(instance, other, raise_error=False) + else: + compat = True + func = getattr(super(TimeSeries, instance), self._name) - result = func(other, *args).view(type(instance)) - result._dates = instance._dates + if compat: + result = func(other, *args).view(type(instance)) + result._dates = instance._dates + else: + result = func(other, *args)._series return result class _tsarraymethod(object): From scipy-svn at scipy.org Wed May 23 10:11:52 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 23 May 2007 09:11:52 -0500 (CDT) Subject: [Scipy-svn] r3034 - trunk/Lib/sandbox/timeseries/lib/tests Message-ID: <20070523141152.E308939C0D1@new.scipy.org> Author: mattknox_ca Date: 2007-05-23 09:11:48 -0500 (Wed, 23 May 2007) New Revision: 3034 Modified: trunk/Lib/sandbox/timeseries/lib/tests/test_moving_funcs.py Log: added tests for new moving functions Modified: trunk/Lib/sandbox/timeseries/lib/tests/test_moving_funcs.py =================================================================== --- trunk/Lib/sandbox/timeseries/lib/tests/test_moving_funcs.py 2007-05-22 20:19:52 UTC (rev 3033) +++ trunk/Lib/sandbox/timeseries/lib/tests/test_moving_funcs.py 2007-05-23 14:11:48 UTC (rev 3034) @@ -18,12 +18,15 @@ import maskedarray.testutils from maskedarray.testutils import * +import maskedarray as MA import maskedarray.core as coremodule from maskedarray.core import MaskedArray, masked +from maskedarray import mstats +import timeseries as TS from timeseries import time_series, thisday -from timeseries.lib.moving_funcs import cmov_average +from timeseries.lib import moving_funcs as MF class test_cmov_average(NumpyTestCase): @@ -37,7 +40,7 @@ data = self.data for 
width in [3,5,7]: k = (width-1)/2 - ravg = cmov_average(data,width) + ravg = MF.cmov_average(data,width) assert(isinstance(ravg, MaskedArray)) assert_equal(ravg, data) assert_equal(ravg._mask, [1]*k+[0]*(len(data)-2*k)+[1]*k) @@ -46,7 +49,7 @@ data = self.maskeddata for width in [3,5,7]: k = (width-1)/2 - ravg = cmov_average(data,width) + ravg = MF.cmov_average(data,width) assert(isinstance(ravg, MaskedArray)) assert_equal(ravg, data) m = N.zeros(len(data), N.bool_) @@ -57,7 +60,7 @@ data = time_series(self.maskeddata, start_date=thisday('D')) for width in [3,5,7]: k = (width-1)/2 - ravg = cmov_average(data,width) + ravg = MF.cmov_average(data,width) assert(isinstance(ravg, MaskedArray)) assert_equal(ravg, data) m = N.zeros(len(data), N.bool_) @@ -71,7 +74,7 @@ data = time_series(maskeddata, start_date=thisday('D')) for width in [3,5,7]: k = (width-1)/2 - ravg = cmov_average(data,width) + ravg = MF.cmov_average(data,width) assert(isinstance(ravg, MaskedArray)) assert_almost_equal(ravg[18].squeeze(), data[18-k:18+k+1].mean(0)) m = N.zeros(data.shape, N.bool_) @@ -79,6 +82,65 @@ assert_equal(ravg._mask, m) assert_equal(ravg._dates, data._dates) + + +class test_mov_funcs(NumpyTestCase): + + def __init__(self, *args, **kwds): + NumpyTestCase.__init__(self, *args, **kwds) + self.data = numeric.arange(25) + self.maskeddata = MaskedArray(self.data) + self.maskeddata[10] = masked + self.func_pairs = [ + (MF.mov_average, MA.mean), + (MF.mov_median, mstats.mmedian), + ((lambda x, span : MF.mov_stddev(x, span, bias=True)), MA.std)] + # + def test_onregulararray(self): + data = self.data + for Mfunc, Nfunc in self.func_pairs: + for k in [3,4,5]: + result = Mfunc(data, k) + assert(isinstance(result, MaskedArray)) + for x in range(len(data)-k+1): + assert_almost_equal(result[x+k-1], Nfunc(data[x:x+k])) + assert_equal(result._mask, [1]*(k-1)+[0]*(len(data)-k+1)) + + # + def test_onmaskedarray(self): + data = self.maskeddata + + for Mfunc, Nfunc in self.func_pairs: + for k in [3,4,5]: + result = Mfunc(data, k) + assert(isinstance(result, MaskedArray)) + for x in range(len(data)-k+1): + if result[x+k-1] is not MA.masked: + assert_almost_equal(result[x+k-1], Nfunc(data[x:x+k])) + result_mask = N.array([1]*(k-1)+[0]*(len(data)-k+1)) + result_mask[10:10+k] = 1 + assert_equal(result._mask, result_mask) + + # + def test_ontimeseries(self): + + data = time_series(self.maskeddata, start_date=thisday('D')) + + for Mfunc, Nfunc in self.func_pairs: + for k in [3,4,5]: + result = Mfunc(data, k) + assert(isinstance(result, MaskedArray)) + for x in range(len(data)-k+1): + if result[x+k-1] is not TS.tsmasked: + assert_almost_equal( + N.asarray(result[x+k-1]), + N.asarray(Nfunc(data[x:x+k]))) + result_mask = N.array([1]*(k-1)+[0]*(len(data)-k+1)) + result_mask[10:10+k] = 1 + assert_equal(result._mask, result_mask) + assert_equal(result._dates, data._dates) + + #------------------------------------------------------------------------------ if __name__ == "__main__": NumpyTest().run() \ No newline at end of file From scipy-svn at scipy.org Wed May 23 10:38:25 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 23 May 2007 09:38:25 -0500 (CDT) Subject: [Scipy-svn] r3035 - in trunk/Lib/sandbox/timeseries: . 
plotlib tests Message-ID: <20070523143825.DA44039C187@new.scipy.org> Author: pierregm Date: 2007-05-23 09:38:21 -0500 (Wed, 23 May 2007) New Revision: 3035 Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py trunk/Lib/sandbox/timeseries/tdates.py trunk/Lib/sandbox/timeseries/tests/test_timeseries.py trunk/Lib/sandbox/timeseries/tseries.py Log: plotlib.mpl_timeseries : copy the _series attribute from a figure to a subplot at the creation tdates : fixed a pb w/ _cache tseries : updated __all__ Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-23 14:11:48 UTC (rev 3034) +++ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-23 14:38:21 UTC (rev 3035) @@ -857,7 +857,12 @@ def add_tsplot(axes, *args, **kwargs): kwargs.update(SubplotClass=TimeSeriesPlot) if 'series' not in kwargs.keys(): - kwargs['series'] = None + if hasattr(axes, 'series'): + kwargs['series'] = axes.series + elif hasattr(axes, '_series'): + kwargs['series'] = axes._series + else: + kwargs['series'] = None return add_generic_subplot(axes, *args, **kwargs) Figure.add_tsplot = add_tsplot Modified: trunk/Lib/sandbox/timeseries/tdates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tdates.py 2007-05-23 14:11:48 UTC (rev 3034) +++ trunk/Lib/sandbox/timeseries/tdates.py 2007-05-23 14:38:21 UTC (rev 3035) @@ -238,10 +238,9 @@ for k in ('toobj', 'tostr', 'toord') if _cache[k] is not None])) _cache['steps'] = None - if reset_full: - _cache['full'] = None - _cache['hasdups'] = None - + if reset_full: + _cache['full'] = None + _cache['hasdups'] = None return r def __getslice__(self, i, j): Modified: trunk/Lib/sandbox/timeseries/tests/test_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-23 14:11:48 UTC (rev 3034) +++ trunk/Lib/sandbox/timeseries/tests/test_timeseries.py 2007-05-23 14:38:21 UTC (rev 3035) @@ -28,8 +28,8 @@ from timeseries import tseries from timeseries import Date, date_array_fromlist, date_array, thisday from timeseries import time_series, TimeSeries, adjust_endpoints, \ - mask_period, align_series, fill_missing_dates, tsmasked, concatenate_series,\ - stack, split + mask_period, align_series, align_with, fill_missing_dates, tsmasked, \ + concatenate_series, stack, split class test_creation(NumpyTestCase): "Base test class for MaskedArrays." 
Modified: trunk/Lib/sandbox/timeseries/tseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/tseries.py 2007-05-23 14:11:48 UTC (rev 3034) +++ trunk/Lib/sandbox/timeseries/tseries.py 2007-05-23 14:38:21 UTC (rev 3035) @@ -48,7 +48,7 @@ 'TimeSeriesError','TimeSeriesCompatibilityError','TimeSeries','isTimeSeries', 'time_series', 'tsmasked', 'mask_period','mask_inside_period','mask_outside_period','compressed', -'adjust_endpoints','align_series','aligned','convert','group_byperiod', +'adjust_endpoints','align_series','align_with','aligned','convert','group_byperiod', 'pct','tshift','fill_missing_dates', 'split', 'stack', 'concatenate_series', 'empty_like', 'day_of_week','day_of_year','day','month','quarter','year', From scipy-svn at scipy.org Wed May 23 15:15:42 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 23 May 2007 14:15:42 -0500 (CDT) Subject: [Scipy-svn] r3036 - in trunk/Lib/stsci/image: lib src Message-ID: <20070523191542.CB12739C19E@new.scipy.org> Author: chanley Date: 2007-05-23 14:15:38 -0500 (Wed, 23 May 2007) New Revision: 3036 Modified: trunk/Lib/stsci/image/lib/_image.py trunk/Lib/stsci/image/src/_combinemodule.c Log: Fixing typecasting problem and making image easier to install outside of the scipy namespace. Modified: trunk/Lib/stsci/image/lib/_image.py =================================================================== --- trunk/Lib/stsci/image/lib/_image.py 2007-05-23 14:38:21 UTC (rev 3035) +++ trunk/Lib/stsci/image/lib/_image.py 2007-05-23 19:15:38 UTC (rev 3036) @@ -1,6 +1,6 @@ import numpy as num -import scipy.stsci.convolve as convolve -import scipy.stsci.convolve._correlate as _correlate +import convolve +import convolve._correlate as _correlate MLab=num def _translate(a, dx, dy, output=None, mode="nearest", cval=0.0): Modified: trunk/Lib/stsci/image/src/_combinemodule.c =================================================================== --- trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-23 14:38:21 UTC (rev 3035) +++ trunk/Lib/stsci/image/src/_combinemodule.c 2007-05-23 19:15:38 UTC (rev 3036) @@ -15,7 +15,7 @@ static int -_mask_and_sort(int ninputs, int index, Float64 **inputs, Bool **masks, +_mask_and_sort(int ninputs, int index, Float64 **inputs, UInt8 **masks, Float64 temp[MAX_ARRAYS]) { int i, j, goodpix; @@ -97,7 +97,7 @@ if (dim == maxdim-1) { Float64 sorted[MAX_ARRAYS]; Float64 *tinputs[MAX_ARRAYS]; - Bool *tmasks[MAX_ARRAYS]; + UInt8 *tmasks[MAX_ARRAYS]; Float64 *toutput; int cols = inputs[0]->dimensions[dim]; @@ -106,7 +106,7 @@ tinputs[i] = (Float64 *) inputs[i]->data; if (masks) { for(i=0; idata; + tmasks[i] = (UInt8 *) masks[i]->data; } toutput = (Float64 *) output->data; @@ -185,7 +185,7 @@ if (badmasks != Py_None) { a = PySequence_GetItem(badmasks, i); if (!a) return NULL; - bmk[i] = NA_InputArray(a, tBool, C_ARRAY); + bmk[i] = NA_InputArray(a, tUInt8, C_ARRAY); if (!bmk[i]) return NULL; Py_DECREF(a); } From scipy-svn at scipy.org Thu May 24 04:02:05 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 24 May 2007 03:02:05 -0500 (CDT) Subject: [Scipy-svn] r3037 - in trunk/Lib/optimize: . 
tests tnc Message-ID: <20070524080205.6908539C120@new.scipy.org> Author: stefan Date: 2007-05-24 03:01:29 -0500 (Thu, 24 May 2007) New Revision: 3037 Modified: trunk/Lib/optimize/linesearch.py trunk/Lib/optimize/tests/test_optimize.py trunk/Lib/optimize/tnc.py trunk/Lib/optimize/tnc/moduleTNC.c Log: fmin_tnc: Add tests, update documentation, fix order of output (closes ticket #423). Modified: trunk/Lib/optimize/linesearch.py =================================================================== --- trunk/Lib/optimize/linesearch.py 2007-05-23 19:15:38 UTC (rev 3036) +++ trunk/Lib/optimize/linesearch.py 2007-05-24 08:01:29 UTC (rev 3037) @@ -1,6 +1,6 @@ ## Automatically adapted for scipy Oct 07, 2005 by convertcode.py -import minpack2 +from scipy.optimize import minpack2 import numpy import sys Modified: trunk/Lib/optimize/tests/test_optimize.py =================================================================== --- trunk/Lib/optimize/tests/test_optimize.py 2007-05-23 19:15:38 UTC (rev 3036) +++ trunk/Lib/optimize/tests/test_optimize.py 2007-05-24 08:01:29 UTC (rev 3037) @@ -7,9 +7,11 @@ set_package_path() from scipy import optimize -from numpy import array, zeros, float64, dot, log, exp +from numpy import array, zeros, float64, dot, log, exp, inf +from scipy.optimize.tnc import RCSTRINGS, MSG_NONE restore_path() +from math import sin, cos, pow class test_optimize(NumpyTestCase): """ Test case for a simple constrained entropy maximization problem @@ -99,9 +101,10 @@ def check_ncg(self): """ line-search Newton conjugate gradient optimization routine """ - retval = optimize.fmin_ncg(self.func, self.startparams, self.grad,\ - args=(), maxiter=self.maxiter, \ - full_output=False, disp=False, retall=False) + retval = optimize.fmin_ncg(self.func, self.startparams, self.grad, + args=(), maxiter=self.maxiter, + full_output=False, disp=False, + retall=False) params = retval @@ -113,8 +116,9 @@ def check_l_bfgs_b(self): """ limited-memory bound-constrained BFGS algorithm """ - retval = optimize.fmin_l_bfgs_b(self.func, self.startparams, self.grad,\ - args=(), maxfun=self.maxiter) + retval = optimize.fmin_l_bfgs_b(self.func, self.startparams, + self.grad, args=(), + maxfun=self.maxiter) (params, fopt, d) = retval @@ -122,6 +126,104 @@ #print "LBFGSB: Difference is: " + str(err) assert err < 1e-6 +class test_tnc(NumpyTestCase): + """TNC non-linear optimization. + These tests are taken from Prof. K. Schittkowski's test examples + for constrained non-linear programming. 
+ + http://www.uni-bayreuth.de/departments/math/~kschittkowski/home.htm + + """ + tests = [] + + def setUp(self): + def test1fg(x): + f = 100.0*pow((x[1]-pow(x[0],2)),2)+pow(1.0-x[0],2) + dif = [0,0] + dif[1] = 200.0*(x[1]-pow(x[0],2)) + dif[0] = -2.0*(x[0]*(dif[1]-1.0)+1.0) + return f, dif + self.tests.append((test1fg, [-2,1], ([-inf,None],[-1.5,None]), + [1,1])) + def test2fg(x): + f = 100.0*pow((x[1]-pow(x[0],2)),2)+pow(1.0-x[0],2) + dif = [0,0] + dif[1] = 200.0*(x[1]-pow(x[0],2)) + dif[0] = -2.0*(x[0]*(dif[1]-1.0)+1.0) + return f, dif + self.tests.append((test2fg, [-2,1], [(-inf,None),(1.5,None)], + [-1.2210262419616387,1.5])) + + def test3fg(x): + f = x[1]+pow(x[1]-x[0],2)*1.0e-5 + dif = [0,0] + dif[0] = -2.0*(x[1]-x[0])*1.0e-5 + dif[1] = 1.0-dif[0] + return f, dif + self.tests.append((test3fg, [10,1], [(-inf,None),(0.0, None)], + [0,0])) + + def test4fg(x): + f = pow(x[0]+1.0,3)/3.0+x[1] + dif = [0,0] + dif[0] = pow(x[0]+1.0,2) + dif[1] = 1.0 + return f, dif + self.tests.append((test4fg, [1.125,0.125], [(1, None),(0, None)], + [1,0])) + + def test5fg(x): + f = sin(x[0]+x[1])+pow(x[0]-x[1],2)-1.5*x[0]+2.5*x[1]+1.0 + dif = [0,0] + v1 = cos(x[0]+x[1]); + v2 = 2.0*(x[0]-x[1]); + + dif[0] = v1+v2-1.5; + dif[1] = v1-v2+2.5; + return f, dif + self.tests.append((test5fg, [0,0], [(-1.5, 4),(-3,3)], + [-0.54719755119659763, -1.5471975511965976])) + + def test38fg(x): + f = (100.0*pow(x[1]-pow(x[0],2),2) + \ + pow(1.0-x[0],2)+90.0*pow(x[3]-pow(x[2],2),2) + \ + pow(1.0-x[2],2)+10.1*(pow(x[1]-1.0,2)+pow(x[3]-1.0,2)) + \ + 19.8*(x[1]-1.0)*(x[3]-1.0))*1.0e-5 + dif = [0,0,0,0] + dif[0] = (-400.0*x[0]*(x[1]-pow(x[0],2))-2.0*(1.0-x[0]))*1.0e-5 + dif[1] = (200.0*(x[1]-pow(x[0],2))+20.2 \ + *(x[1]-1.0)+19.8*(x[3]-1.0))*1.0e-5 + dif[2] = (-360.0*x[2]*(x[3]-pow(x[2],2))-2.0\ + *(1.0-x[2]))*1.0e-5 + dif[3] = (180.0*(x[3]-pow(x[2],2))+20.2\ + *(x[3]-1.0)+19.8*(x[1]-1.0))*1.0e-5 + return f, dif + self.tests.append ((test38fg, [-3,-1,-3,-1], [(-10,10)]*4, [1]*4)) + + def test45fg(x): + f = 2.0-x[0]*x[1]*x[2]*x[3]*x[4]/120.0 + dif = [0]*5 + dif[0] = -x[1]*x[2]*x[3]*x[4]/120.0 + dif[1] = -x[0]*x[2]*x[3]*x[4]/120.0 + dif[2] = -x[0]*x[1]*x[3]*x[4]/120.0 + dif[3] = -x[0]*x[1]*x[2]*x[4]/120.0 + dif[4] = -x[0]*x[1]*x[2]*x[3]/120.0 + return f, dif + self.tests.append ((test45fg, [2]*5, [(0,1),(0,2),(0,3),(0,4),(0,5)], + [1,2,3,4,5])) + + def test_tnc(self): + for fg, x, bounds, xopt in self.tests: + x, nf, rc = optimize.fmin_tnc(fg, x, bounds=bounds, + messages=MSG_NONE, maxfun=200) + err = "Failed optimization of %s.\n" \ + "After %d function evaluations, TNC returned: %s.""" % \ + (fg.__name__, nf, RCSTRINGS[rc]) + + assert_array_almost_equal(array(x,dtype=float), + array(xopt,dtype=float), + err_msg=err) + if __name__ == "__main__": NumpyTest().run() Modified: trunk/Lib/optimize/tnc/moduleTNC.c =================================================================== --- trunk/Lib/optimize/tnc/moduleTNC.c 2007-05-23 19:15:38 UTC (rev 3036) +++ trunk/Lib/optimize/tnc/moduleTNC.c 2007-05-24 08:01:29 UTC (rev 3037) @@ -288,7 +288,7 @@ return NULL; } - return Py_BuildValue("(iiN)", rc, nfeval, py_list);; + return Py_BuildValue("(Nii)", py_list, nfeval, rc); } static PyMethodDef moduleTNC_methods[] = Modified: trunk/Lib/optimize/tnc.py =================================================================== --- trunk/Lib/optimize/tnc.py 2007-05-23 19:15:38 UTC (rev 3036) +++ trunk/Lib/optimize/tnc.py 2007-05-24 08:01:29 UTC (rev 3037) @@ -34,8 +34,8 @@ (as a list of values); or None, to abort the minimization. 
""" -import moduleTNC -from numpy import asarray +from scipy.optimize import moduleTNC +from numpy import asarray, inf MSG_NONE = 0 # No messages MSG_ITER = 1 # One line per iteration @@ -53,9 +53,6 @@ MSG_ALL : "All messages" } -HUGE_VAL=1e500 # No standard representation of Infinity in Python 2.3.3 - # FIXME: can we use inf now that we have numpy and IEEE floats? - EINVAL = -2 # Invalid parameters (n<1) INFEASIBLE = -1 # Infeasible (low > up) LOCALMINIMUM = 0 # Local minima reach (|pg| ~= 0) @@ -92,84 +89,90 @@ """Minimize a function with variables subject to bounds, using gradient information. - returns (rc, nfeval, x). + :Parameters: - Inputs: + func : callable func(x, *args) + Function to minimize. + x0 : float + Initial guess to minimum. + fprime : callable fprime(x, *args) + Gradient of func. If None, then func must return the + function value and the gradient, e.g. + f,g = func(x,*args). + args : tuple + Arguments to pass to function. + approx_grad : bool + If true, approximate the gradient numerically. + bounds : list + (min, max) pairs for each element in x, defining the + bounds on that parameter. Use None for one of min or + max when there is no bound in that direction + scale : list of floats + Scaling factors to apply to each variable. If None, the + factors are up-low for interval bounded variables and + 1+|x] fo the others. Defaults to None. + messages : + Bit mask used to select messages display during + minimization values defined in the optimize.tnc.MSGS dict. + defaults to optimize.tnc.MGS_ALL. + maxCGit : int + Maximum number of hessian*vector evaluation per main + iteration. If maxCGit == 0, the direction chosen is + -gradient. If maxCGit < 0, maxCGit is set to + max(1,min(50,n/2)). Defaults to -1. + maxfun : int + Maximum number of function evaluation. If None, maxfun + is set to max(1000, 100*len(x0)). Defaults to None. + eta : float + Severity of the line search. if < 0 or > 1, set to 0.25. + Defaults to -1. + stepmx : float + Maximum step for the line search. May be increased during + call. If too small, will be set to 10.0. Defaults to 0. + accuracy : float + Relative precision for finite difference calculations. If + <= machine_precision, set to sqrt(machine_precision). + Defaults to 0. + fmin : float + Minimum function value estimate. Defaults to 0. + ftol : float + Precision goal for the value of f in the stoping criterion + relative to the machine precision and the value of f. If + ftol < 0.0, ftol is set to 0.0. Defaults to 0. + rescale : float + Scaling factor (in log10) used to trigger rescaling. If + 0, rescale at each iteration. If a large value, never + rescale. If < 0, rescale is set to 1.3. - func -- function to minimize. Called as func(x, *args) + :Returns: - x0 -- initial guess to minimum + x : list of floats + The solution. + nfeval : int + The number of function evaluations. + rc : + Return code (corresponding message in optimize.tnc.RCSTRINGS). - fprime -- gradient of func. If None, then func returns the function - value and the gradient ( f, g = func(x, *args) ). - Called as fprime(x, *args) + :SeeAlso: - args -- arguments to pass to function + - fmin, fmin_powell, fmin_cg, fmin_bfgs, fmin_ncg : multivariate + local optimizers - approx_grad -- if true, approximate the gradient numerically + - leastsq : nonlinear least squares minimizer - bounds -- a list of (min, max) pairs for each element in x, defining - the bounds on that parameter. 
Use None for one of min or max - when there is no bound in that direction + - fmin_l_bfgs_b, fmin_tnc, fmin_cobyla : constrained + multivariate optimizers - scale : scaling factors to apply to each variable (a list of floats) - if None, the factors are up-low for interval bounded variables - and 1+|x] fo the others. - defaults to None - messages : bit mask used to select messages display during minimization - values defined in the optimize.tnc.MSGS dict. - defaults to optimize.tnc.MGS_ALL - maxCGit : max. number of hessian*vector evaluation per main iteration - if maxCGit == 0, the direction chosen is -gradient - if maxCGit < 0, maxCGit is set to max(1,min(50,n/2)) - defaults to -1 - maxfun : max. number of function evaluation - if None, maxnfeval is set to max(1000, 100*len(x0)) - defaults to None - eta : severity of the line search. if < 0 or > 1, set to 0.25 - defaults to -1 - stepmx : maximum step for the line search. may be increased during call - if too small, will be set to 10.0 - defaults to 0 - accuracy : relative precision for finite difference calculations - if <= machine_precision, set to sqrt(machine_precision) - defaults to 0 - fmin : minimum function value estimate - defaults to 0 - ftol : precision goal for the value of f in the stoping criterion - relative to the machine precision and the value of f. - if ftol < 0.0, ftol is set to 0.0 - defaults to 0 - rescale : Scaling factor (in log10) used to trigger rescaling - if 0, rescale at each iteration - if a large value, never rescale - if < 0, rescale is set to 1.3 + - anneal, brute : global optimizers - Outputs: + - fminbound, brent, golden, bracket : local scalar minimizers - x : the solution (a list of floats) - nfeval : the number of function evaluations - rc : return code (corresponding message in optimize.tnc.RCSTRINGS) + - fsolve : n-dimenstional root-finding - See also: + - brentq, brenth, ridder, bisect, newton : one-dimensional root-finding - fmin, fmin_powell, fmin_cg, - fmin_bfgs, fmin_ncg -- multivariate local optimizers - leastsq -- nonlinear least squares minimizer + - fixed_point : scalar fixed-point finder - fmin_l_bfgs_b, fmin_tnc, - fmin_cobyla -- constrained multivariate optimizers - - anneal, brute -- global optimizers - - fminbound, brent, golden, bracket -- local scalar minimizers - - fsolve -- n-dimenstional root-finding - - brentq, brenth, ridder, bisect, newton -- one-dimensional root-finding - - fixed_point -- scalar fixed-point finder - """ n = len(x0) @@ -202,11 +205,11 @@ for i in range(n): l,u = bounds[i] if l is None: - low[i] = -HUGE_VAL + low[i] = -inf else: low[i] = l if u is None: - up[i] = HUGE_VAL + up[i] = inf else: up[i] = u @@ -236,7 +239,7 @@ return f, g # Optimizer call - rc, nf, x = fmin_tnc(function, [-7, 3], bounds=([-10, 10], [1, 10])) + x, nf, rc = fmin_tnc(function, [-7, 3], bounds=([-10, 10], [1, 10])) print "After", nf, "function evaluations, TNC returned:", RCSTRINGS[rc] print "x =", x @@ -244,98 +247,3 @@ print example() - - # Tests - # These tests are taken from Prof. K. Schittkowski test examples for - # constrained nonlinear programming. 
- # http://www.uni-bayreuth.de/departments/math/~kschittkowski/home.htm - tests = [] - def test1fg(x): - f = 100.0*pow((x[1]-pow(x[0],2)),2)+pow(1.0-x[0],2) - dif = [0,0] - dif[1] = 200.0*(x[1]-pow(x[0],2)) - dif[0] = -2.0*(x[0]*(dif[1]-1.0)+1.0) - return f, dif - tests.append ((test1fg, [-2,1], ([-HUGE_VAL,None],[-1.5,None]), [1,1])) - - def test2fg(x): - f = 100.0*pow((x[1]-pow(x[0],2)),2)+pow(1.0-x[0],2) - dif = [0,0] - dif[1] = 200.0*(x[1]-pow(x[0],2)) - dif[0] = -2.0*(x[0]*(dif[1]-1.0)+1.0) - return f, dif - tests.append ((test2fg, [-2,1], [(-HUGE_VAL,None),(1.5,None)], [-1.2210262419616387,1.5])) - - def test3fg(x): - f = x[1]+pow(x[1]-x[0],2)*1.0e-5 - dif = [0,0] - dif[0] = -2.0*(x[1]-x[0])*1.0e-5 - dif[1] = 1.0-dif[0] - return f, dif - tests.append ((test3fg, [10,1], [(-HUGE_VAL,None),(0.0, None)], [0,0])) - - def test4fg(x): - f = pow(x[0]+1.0,3)/3.0+x[1] - dif = [0,0] - dif[0] = pow(x[0]+1.0,2) - dif[1] = 1.0 - return f, dif - tests.append ((test4fg, [1.125,0.125], [(1, None),(0, None)], [1,0])) - - from math import * - - def test5fg(x): - f = sin(x[0]+x[1])+pow(x[0]-x[1],2)-1.5*x[0]+2.5*x[1]+1.0 - dif = [0,0] - v1 = cos(x[0]+x[1]); - v2 = 2.0*(x[0]-x[1]); - - dif[0] = v1+v2-1.5; - dif[1] = v1-v2+2.5; - return f, dif - tests.append ((test5fg, [0,0], [(-1.5, 4),(-3,3)], [-0.54719755119659763, -1.5471975511965976])) - - def test38fg(x): - f = (100.0*pow(x[1]-pow(x[0],2),2)+pow(1.0-x[0],2)+90.0*pow(x[3]-pow(x[2],2),2) \ - +pow(1.0-x[2],2)+10.1*(pow(x[1]-1.0,2)+pow(x[3]-1.0,2)) \ - +19.8*(x[1]-1.0)*(x[3]-1.0))*1.0e-5 - dif = [0,0,0,0] - dif[0] = (-400.0*x[0]*(x[1]-pow(x[0],2))-2.0*(1.0-x[0]))*1.0e-5 - dif[1] = (200.0*(x[1]-pow(x[0],2))+20.2*(x[1]-1.0)+19.8*(x[3]-1.0))*1.0e-5 - dif[2] = (-360.0*x[2]*(x[3]-pow(x[2],2))-2.0*(1.0-x[2]))*1.0e-5 - dif[3] = (180.0*(x[3]-pow(x[2],2))+20.2*(x[3]-1.0)+19.8*(x[1]-1.0))*1.0e-5 - return f, dif - tests.append ((test38fg, [-3,-1,-3,-1], [(-10,10)]*4, [1]*4)) - - def test45fg(x): - f = 2.0-x[0]*x[1]*x[2]*x[3]*x[4]/120.0 - dif = [0]*5 - dif[0] = -x[1]*x[2]*x[3]*x[4]/120.0 - dif[1] = -x[0]*x[2]*x[3]*x[4]/120.0 - dif[2] = -x[0]*x[1]*x[3]*x[4]/120.0 - dif[3] = -x[0]*x[1]*x[2]*x[4]/120.0 - dif[4] = -x[0]*x[1]*x[2]*x[3]/120.0 - return f, dif - tests.append ((test45fg, [2]*5, [(0,1),(0,2),(0,3),(0,4),(0,5)], [1,2,3,4,5])) - - def test(fg, x, bounds, xopt): - print "** Test", fg.__name__ - rc, nf, x = fmin_tnc(fg, x, bounds=bounds, messages = MSG_NONE, maxnfeval = 200) - print "After", nf, "function evaluations, TNC returned:", RCSTRINGS[rc] - print "x =", x - print "exact value =", xopt - enorm = 0.0 - norm = 1.0 - for y,yo in zip(x, xopt): - enorm += (y-yo)*(y-yo) - norm += yo*yo - e = pow(enorm/norm, 0.5) - print "Error =", e - if e > 1e-8: - raise "Test "+fg.__name__+" failed" - - for fg, x, bounds, xopt in tests: - test(fg, x, bounds, xopt) - - print - print "** All TNC tests passed." From scipy-svn at scipy.org Thu May 24 04:08:35 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 24 May 2007 03:08:35 -0500 (CDT) Subject: [Scipy-svn] r3038 - in trunk/Lib/optimize: . tests Message-ID: <20070524080835.F093839C126@new.scipy.org> Author: stefan Date: 2007-05-24 03:08:11 -0500 (Thu, 24 May 2007) New Revision: 3038 Modified: trunk/Lib/optimize/tests/test_optimize.py trunk/Lib/optimize/tnc.py Log: Minor cosmetic changes. 
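For reference, the net effect on callers of fmin_tnc of r3037 above (which this cosmetic follow-up tidies) is the new return order (x, nfeval, rc), with numpy's inf replacing the old HUGE_VAL sentinel internally. Below is a minimal hedged sketch of the new calling convention, using only keyword arguments that appear in the diffs; the quadratic objective is made up for illustration and is not from the source.

    from numpy import inf
    from scipy import optimize
    from scipy.optimize.tnc import RCSTRINGS, MSG_NONE

    def fg(x):
        # f(x) = (x0 - 3)^2 + (x1 + 1)^2 and its gradient.
        f = (x[0] - 3.0)**2 + (x[1] + 1.0)**2
        g = [2.0*(x[0] - 3.0), 2.0*(x[1] + 1.0)]
        return f, g

    # r3037 changed the return order from (rc, nfeval, x) to (x, nfeval, rc).
    x, nfeval, rc = optimize.fmin_tnc(fg, [0.0, 0.0],
                                      bounds=[(-inf, None), (None, None)],
                                      messages=MSG_NONE, maxfun=200)
    print(RCSTRINGS[rc])   # human-readable status, e.g. a local minimum was reached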
Modified: trunk/Lib/optimize/tests/test_optimize.py =================================================================== --- trunk/Lib/optimize/tests/test_optimize.py 2007-05-24 08:01:29 UTC (rev 3037) +++ trunk/Lib/optimize/tests/test_optimize.py 2007-05-24 08:08:11 UTC (rev 3038) @@ -153,7 +153,7 @@ dif[0] = -2.0*(x[0]*(dif[1]-1.0)+1.0) return f, dif self.tests.append((test2fg, [-2,1], [(-inf,None),(1.5,None)], - [-1.2210262419616387,1.5])) + [-1.2210262419616387,1.5])) def test3fg(x): f = x[1]+pow(x[1]-x[0],2)*1.0e-5 @@ -199,7 +199,7 @@ dif[3] = (180.0*(x[3]-pow(x[2],2))+20.2\ *(x[3]-1.0)+19.8*(x[1]-1.0))*1.0e-5 return f, dif - self.tests.append ((test38fg, [-3,-1,-3,-1], [(-10,10)]*4, [1]*4)) + self.tests.append((test38fg, [-3,-1,-3,-1], [(-10,10)]*4, [1]*4)) def test45fg(x): f = 2.0-x[0]*x[1]*x[2]*x[3]*x[4]/120.0 @@ -210,8 +210,8 @@ dif[3] = -x[0]*x[1]*x[2]*x[4]/120.0 dif[4] = -x[0]*x[1]*x[2]*x[3]/120.0 return f, dif - self.tests.append ((test45fg, [2]*5, [(0,1),(0,2),(0,3),(0,4),(0,5)], - [1,2,3,4,5])) + self.tests.append((test45fg, [2]*5, [(0,1),(0,2),(0,3),(0,4),(0,5)], + [1,2,3,4,5])) def test_tnc(self): for fg, x, bounds, xopt in self.tests: Modified: trunk/Lib/optimize/tnc.py =================================================================== --- trunk/Lib/optimize/tnc.py 2007-05-24 08:01:29 UTC (rev 3037) +++ trunk/Lib/optimize/tnc.py 2007-05-24 08:08:11 UTC (rev 3038) @@ -104,9 +104,9 @@ approx_grad : bool If true, approximate the gradient numerically. bounds : list - (min, max) pairs for each element in x, defining the - bounds on that parameter. Use None for one of min or - max when there is no bound in that direction + (min, max) pairs for each element in x, defining the + bounds on that parameter. Use None for one of min or max + when there is no bound in that direction scale : list of floats Scaling factors to apply to each variable. If None, the factors are up-low for interval bounded variables and From scipy-svn at scipy.org Thu May 24 08:30:14 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 24 May 2007 07:30:14 -0500 (CDT) Subject: [Scipy-svn] r3039 - in trunk/Lib/sandbox/lobpcg: . tests Message-ID: <20070524123014.713E139C032@new.scipy.org> Author: rc Date: 2007-05-24 07:30:05 -0500 (Thu, 24 May 2007) New Revision: 3039 Added: trunk/Lib/sandbox/lobpcg/tests/ trunk/Lib/sandbox/lobpcg/tests/benchmark.py trunk/Lib/sandbox/lobpcg/tests/test_lobpcg.py Modified: trunk/Lib/sandbox/lobpcg/info.py trunk/Lib/sandbox/lobpcg/lobpcg.py trunk/Lib/sandbox/lobpcg/setup.py Log: more docs, examples and return values Modified: trunk/Lib/sandbox/lobpcg/info.py =================================================================== --- trunk/Lib/sandbox/lobpcg/info.py 2007-05-24 08:08:11 UTC (rev 3038) +++ trunk/Lib/sandbox/lobpcg/info.py 2007-05-24 12:30:05 UTC (rev 3039) @@ -1,16 +1,88 @@ """ The algorithm of LOBPCG is described in detail in: -A. V. Knyazev, Toward the Optimal Preconditioned Eigensolver: Locally Optimal Block Preconditioned Conjugate Gradient Method. SIAM Journal on Scientific Computing 23 (2001), no. 2, pp. 517-541. http://dx.doi.org/10.1137/S1064827500366124 +A. V. Knyazev, Toward the Optimal Preconditioned Eigensolver: Locally Optimal +Block Preconditioned Conjugate Gradient Method. SIAM Journal on Scientific +Computing 23 (2001), no. 2, +pp. 517-541. http://dx.doi.org/10.1137/S1064827500366124 -A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov, Block Locally Optimal Preconditioned Eigenvalue Xolvers (BLOPEX) in hypre and PETSc (2007). 
http://arxiv.org/abs/0705.2626 +A. V. Knyazev, I. Lashuk, M. E. Argentati, and E. Ovchinnikov, Block Locally +Optimal Preconditioned Eigenvalue Xolvers (BLOPEX) in hypre and PETSc +(2007). http://arxiv.org/abs/0705.2626 +Call the function lobpcg - see help for lobpcg.lobpcg. See also lobpcg.as2d, +which can be used in the preconditioner (example below) -Depends upon symeig (http://mdp-toolkit.sourceforge.net/symeig.html) for the -moment, as the symmetric eigenvalue solvers were not available in scipy. +Example: -Usage: XXXXX + # Solve A x = lambda B x with constraints and preconditioning. + n = 100 + vals = [nm.arange( n, dtype = nm.float64 ) + 1] + + # Matrix A. + operatorA = spdiags( vals, 0, n, n ) + + # Matrix B + operatorB = nm.eye( n, n ) + + # Constraints. + Y = nm.eye( n, 3 ) + + # Initial guess for eigenvectors, should have linearly independent + # columns. Column dimension = number of requested eigenvalues. + X = sc.rand( n, 3 ) + + # Preconditioner - inverse of A. + ivals = [1./vals[0]] + def precond( x ): + invA = spdiags( ivals, 0, n, n ) + y = invA * x + if sp.issparse( y ): + y = y.toarray() + + return as2d( y ) + + # Alternative way of providing the same preconditioner. + #precond = spdiags( ivals, 0, n, n ) + + tt = time.clock() + eigs, vecs = lobpcg( X, operatorA, operatorB, blockVectorY = Y, + operatorT = precond, + residualTolerance = 1e-4, maxIterations = 40, + largest = False, verbosityLevel = 1 ) + print 'solution time:', time.clock() - tt + print eigs + +Usage notes: + +Notation: n - matrix size, m - number of required eigenvalues (smallest or +largest) + +1) The LOBPCG code internally solves eigenproblems of the size 3m on every + iteration by calling the"standard" eigensolver, so if m is not small enough + compared to n, it does not make sense to call the LOBPCG code, but rather + one should use the "standard" eigensolver, e.g. symeig function in this + case. If one calls the LOBPCG algorithm for 5m>n, it will most likely break + internally, so the code tries to call symeig instead. + + It is not that n should be large for the LOBPCG to work, but rather the + ratio n/m should be large. It you call the LOBPCG code with m=1 and n=10, it + should work, though n is small. The method is intended for extremely large + n/m, see e.g., reference [28] in http://arxiv.org/abs/0705.2626 + +2) The convergence speed depends basically on two factors: + + a) how well relatively separated the seeking eigenvalues are from the rest of + the eigenvalues. One can try to vary m to make this better. + + b) how "well conditioned" the problem is. This can be changed by using proper + preconditioning. For example, a rod vibration test problem (under tests + directory) is ill-conditioned + for large n, so convergence will be slow, unless efficient preconditioning is + used. For this specific problem, a good simple preconditioner function would + be a linear solve for A, which is easy to code since A is tridiagonal. + """ postpone_import = 1 Modified: trunk/Lib/sandbox/lobpcg/lobpcg.py =================================================================== --- trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-24 08:08:11 UTC (rev 3038) +++ trunk/Lib/sandbox/lobpcg/lobpcg.py 2007-05-24 12:30:05 UTC (rev 3039) @@ -5,7 +5,12 @@ License: BSD +Depends upon symeig (http://mdp-toolkit.sourceforge.net/symeig.html) for the +moment, as the symmetric eigenvalue solvers were not available in scipy. + (c) Robert Cimrman, Andrew Knyazev + +Examples in tests directory contributed by Nils Wagner. 
""" import numpy as nm @@ -25,6 +30,10 @@ ## # 21.05.2007, c def as2d( ar ): + """ + If the input array is 2D return it, if it is 1D, append a dimension, + making it a column vector. + """ if ar.ndim == 2: return ar else: # Assume 1! @@ -35,11 +44,23 @@ ## # 05.04.2007, c # 10.04.2007 +# 24.05.2007 def makeOperator( operatorInput, expectedShape ): + """ + Internal. Takes a dense numpy array or a sparse matrix or a function and + makes an operator performing matrix * vector product. + + :Example: + + operatorA = makeOperator( arrayA, (n, n) ) + vectorB = operatorA( vectorX ) + """ class Operator( object ): def __call__( self, vec ): return self.call( vec ) - + def asMatrix( self ): + return self._asMatrix( self ) + operator = Operator() operator.obj = operatorInput @@ -55,23 +76,30 @@ if sp.issparse( out ): out = out.toarray() return as2d( out ) + def asMatrix( op ): + return op.obj.toarray() else: def call( vec ): return as2d( nm.asarray( sc.dot( operator.obj, vec ) ) ) + def asMatrix( op ): + return op.obj operator.call = call + operator._asMatrix = asMatrix + operator.kind = 'matrix' elif isinstance( operatorInput, types.FunctionType ) or \ isinstance( operatorInput, types.BuiltinFunctionType ): operator.shape = expectedShape operator.dtype = nm.float64 operator.call = operatorInput + operator.kind = 'function' return operator ## # 05.04.2007, c def applyConstraints( blockVectorV, factYBY, blockVectorBY, blockVectorY ): - """Changes blockVectorV in place.""" + """Internal. Changes blockVectorV in place.""" gramYBV = sc.dot( blockVectorBY.T, blockVectorV ) tmp = la.cho_solve( factYBY, gramYBV ) blockVectorV -= sc.dot( blockVectorY, tmp ) @@ -80,7 +108,7 @@ # 05.04.2007, c def b_orthonormalize( operatorB, blockVectorV, blockVectorBV = None, retInvR = False ): - + """Internal.""" if blockVectorBV is None: if operatorB is not None: blockVectorBV = operatorB( blockVectorV ) @@ -104,14 +132,68 @@ # 05.04.2007 # 06.04.2007 # 10.04.2007 +# 24.05.2007 def lobpcg( blockVectorX, operatorA, operatorB = None, operatorT = None, blockVectorY = None, residualTolerance = None, maxIterations = 20, largest = True, verbosityLevel = 0, retLambdaHistory = False, retResidualNormsHistory = False ): + """ + LOBPCG solves symmetric partial eigenproblems using preconditioning. - exitFlag = 0 + Required input: + blockVectorX - initial approximation to eigenvectors, full or sparse matrix + n-by-blockSize + + operatorA - the operator of the problem, can be given as a matrix or as an + M-file + + + Optional input: + + operatorB - the second operator, if solving a generalized eigenproblem; by + default, or if empty, operatorB = I. + + operatorT - preconditioner; by default, operatorT = I. + + + Optional constraints input: + + blockVectorY - n-by-sizeY matrix of constraints, sizeY < n. The iterations + will be performed in the (operatorB-) orthogonal complement of the + column-space of blockVectorY. blockVectorY must be full rank. + + + Optional scalar input parameters: + + residualTolerance - tolerance, by default, residualTolerance=n*sqrt(eps) + + maxIterations - max number of iterations, by default, maxIterations = + min(n,20) + + largest - when true, solve for the largest eigenvalues, otherwise for the + smallest + + verbosityLevel - by default, verbosityLevel = 0. 
+ + retLambdaHistory - return eigenvalue history + + retResidualNormsHistory - return history of residual norms + + Output: + + blockVectorX and lambda are computed blockSize eigenpairs, where + blockSize=size(blockVectorX,2) for the initial guess blockVectorX if it is + full rank. + + If both retLambdaHistory and retResidualNormsHistory are True, the + return tuple has the following order: + + lambda, blockVectorX, lambda history, residual norms history + """ + failureFlag = True + if blockVectorY is not None: sizeY = blockVectorY.shape[1] else: @@ -133,6 +215,33 @@ if operatorB is not None: operatorB = makeOperator( operatorB, (n, n) ) + if (n - sizeY) < (5 * sizeX): + print 'The problem size is too small, compared to the block size, for LOBPCG to run.' + print 'Trying to use symeig instead, without preconditioning.' + if blockVectorY is not None: + print 'symeig does not support constraints' + raise ValueError + + if largest: + lohi = (n - sizeX, n) + else: + lohi = (1, sizeX) + + if operatorA.kind == 'function': + print 'symeig does not support matrix A given by function' + + if operatorB is not None: + if operatorB.kind == 'function': + print 'symeig does not support matrix B given by function' + + _lambda, eigBlockVector = symeig( operatorA.asMatrix(), + operatorB.asMatrix(), + range = lohi ) + else: + _lambda, eigBlockVector = symeig( operatorA.asMatrix(), + range = lohi ) + return _lambda, eigBlockVector + if operatorT is not None: operatorT = makeOperator( operatorT, (n, n) ) ## if n != operatorA.shape[0]: @@ -254,9 +363,8 @@ previousBlockSize = currentBlockSize ident = nm.eye( currentBlockSize, dtype = operatorA.dtype ) - if currentBlockSize == 0: - failureFlag = 0 # All eigenpairs converged. + failureFlag = False # All eigenpairs converged. break if verbosityLevel > 0: @@ -377,6 +485,9 @@ _lambda = _lambda[ii].astype( nm.float64 ) eigBlockVector = nm.asarray( eigBlockVector[:,ii].astype( nm.float64 ) ) + + lambdaHistory.append( _lambda ) + if verbosityLevel > 10: print 'lambda:', _lambda ## # Normalize eigenvectors!
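The two history flags documented above change the shape of the return value (the branching is added near the end of this diff). A small, self-contained sketch of how a caller unpacks the four-element form; the toy matrix and variable names are mine, the keyword names come from the signature above:

    import numpy as nm
    from scipy.sandbox import lobpcg   # import path as used by the test scripts

    n, m = 100, 3
    A = nm.diag( nm.arange( 1.0, n + 1.0 ) )   # simple SPD test operator
    X = nm.random.rand( n, m )                 # initial block, full column rank

    # With both flags set the return order is:
    #   eigenvalues, eigenvectors, eigenvalue history, residual-norm history
    eigs, vecs, lam_hist, res_hist = lobpcg.lobpcg( X, A,
                                        retLambdaHistory = True,
                                        retResidualNormsHistory = True )
    # res_hist[-1] holds the residual norms at the final iteration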
@@ -437,9 +548,17 @@ print 'final eigenvalue:', _lambda print 'final residual norms:', residualNorms + if retLambdaHistory: + if retResidualNormsHistory: + return _lambda, eigBlockVectorX, lambdaHistory, residualNormsHistory + else: + return _lambda, eigBlockVectorX, lambdaHistory + else: + if retResidualNormsHistory: + return _lambda, eigBlockVectorX, residualNormsHistory + else: + return _lambda, eigBlockVectorX - return _lambda, eigBlockVectorX - ########################################################################### if __name__ == '__main__': from scipy.sparse import spdiags, speye @@ -457,7 +576,7 @@ Y = nm.eye( n, 3 ) -## X = sc.rand( n, 3 ) +# X = sc.rand( n, 3 ) xfile = {100 : 'X.txt', 1000 : 'X2.txt', 10000 : 'X3.txt'} X = nm.fromfile( xfile[n], dtype = nm.float64, sep = ' ' ) X.shape = (n, 3) @@ -471,6 +590,8 @@ return as2d( y ) +# precond = spdiags( ivals, 0, n, n ) + tt = time.clock() eigs, vecs = lobpcg( X, operatorA, operatorB, blockVectorY = Y, operatorT = precond, Modified: trunk/Lib/sandbox/lobpcg/setup.py =================================================================== --- trunk/Lib/sandbox/lobpcg/setup.py 2007-05-24 08:08:11 UTC (rev 3038) +++ trunk/Lib/sandbox/lobpcg/setup.py 2007-05-24 12:30:05 UTC (rev 3039) @@ -7,7 +7,7 @@ from numpy.distutils.system_info import get_info config = Configuration('lobpcg',parent_package,top_path) -# config.add_data_dir('tests') + config.add_data_dir('tests') return config Added: trunk/Lib/sandbox/lobpcg/tests/benchmark.py =================================================================== --- trunk/Lib/sandbox/lobpcg/tests/benchmark.py 2007-05-24 08:08:11 UTC (rev 3038) +++ trunk/Lib/sandbox/lobpcg/tests/benchmark.py 2007-05-24 12:30:05 UTC (rev 3039) @@ -0,0 +1,64 @@ +from scipy import * +from scipy.sandbox import lobpcg +from symeig import symeig +from pylab import plot, show, legend, xlabel, ylabel +set_printoptions(precision=3,linewidth=90) +import time + +def test(n): + x = arange(1,n+1) + B = diag(1./x) + y = arange(n-1,0,-1) + z = arange(2*n-1,0,-2) + A = diag(z)-diag(y,-1)-diag(y,1) + return A,B + +def as2d( ar ): + if ar.ndim == 2: + return ar + else: # Assume 1! 
+ aux = nm.array( ar, copy = False ) + aux.shape = (ar.shape[0], 1) + return aux + +def precond(x): + y= linalg.cho_solve((LorU, lower),x) + return as2d(y) + +m = 10 # Blocksize +N = array(([128,256,512,1024,2048])) # Increasing matrix size + +data1=[] +data2=[] + +for n in N: + print '******', n + A,B = test(n) # Mikota pair + X = rand(n,m) + X = linalg.orth(X) + + tt = time.clock() + (LorU, lower) = linalg.cho_factor(A, lower=0, overwrite_a=0) + eigs,vecs = lobpcg.lobpcg(X,A,B,operatorT = precond, + residualTolerance = 1e-4, maxIterations = 40) + data1.append(time.clock()-tt) + eigs = sort(eigs) + print + print 'Results by LOBPCG' + print + print n,eigs + + tt = time.clock() + w,v=symeig(A,B,range=(1,m)) + data2.append(time.clock()-tt) + print + print 'Results by symeig' + print + print n, w + +xlabel(r'Size $n$') +ylabel(r'Elapsed time $t$') +plot(N,data1,label='LOBPCG') +plot(N,data2,label='SYMEIG') +legend() +show() Added: trunk/Lib/sandbox/lobpcg/tests/test_lobpcg.py =================================================================== --- trunk/Lib/sandbox/lobpcg/tests/test_lobpcg.py 2007-05-24 08:08:11 UTC (rev 3038) +++ trunk/Lib/sandbox/lobpcg/tests/test_lobpcg.py 2007-05-24 12:30:05 UTC (rev 3039) @@ -0,0 +1,47 @@ +from scipy import * +from scipy.sandbox import lobpcg +from symeig import symeig +from pylab import plot, show, legend, xlabel, ylabel +set_printoptions(precision=3,linewidth=90) + +def test1(n): + L = 1.0 + le=L/n + rho = 7.85e3 + S = 1.e-4 + E = 2.1e11 + mass = rho*S*le/6. + k = E*S/le + A = k*(diag(r_[2.*ones(n-1),1])-diag(ones(n-1),1)-diag(ones(n-1),-1)) + B = mass*(diag(r_[4.*ones(n-1),2])+diag(ones(n-1),1)+diag(ones(n-1),-1)) + return A,B + +def test2(n): + x = arange(1,n+1) + B = diag(1./x) + y = arange(n-1,0,-1) + z = arange(2*n-1,0,-2) + A = diag(z)-diag(y,-1)-diag(y,1) + return A,B + +n = 100 # Dimension + +A,B = test1(n) # Fixed-free elastic rod +A,B = test2(n) # Mikota pair acts as a nice test since the eigenvalues are the squares of the integers n, n=1,2,... 
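As the comment above says, the Mikota pair is a convenient test because its generalized eigenvalues are the squares of the integers. A standalone check of that property, using the dense generalized symmetric solver scipy.linalg.eigh (available in later scipy releases; at the time of this commit one would use symeig instead). The helper name below is mine, but the construction is the same as test2():

    import numpy as np
    from scipy.linalg import eigh

    def mikota_pair( n ):
        x = np.arange( 1, n + 1 )
        B = np.diag( 1. / x )
        y = np.arange( n - 1, 0, -1 )
        z = np.arange( 2 * n - 1, 0, -2 )
        A = np.diag( z ) - np.diag( y, -1 ) - np.diag( y, 1 )
        return A, B

    A, B = mikota_pair( 50 )
    w = eigh( A, B, eigvals_only = True )             # solves A v = w B v
    print np.allclose( w[:5], np.arange( 1, 6 )**2 )  # smallest eigenvalues 1, 4, 9, 16, 25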
+ +m = 20 +V = rand(n,m) +X = linalg.orth(V) + +eigs,vecs = lobpcg.lobpcg(X,A,B) +eigs = sort(eigs) + +w,v=symeig(A,B) + + +plot(arange(0,len(w[:m])),w[:m],'bx',label='Results by symeig') +plot(arange(0,len(eigs)),eigs,'r+',label='Results by lobpcg') +legend() +xlabel(r'Eigenvalue $i$') +ylabel(r'$\lambda_i$') +show() From scipy-svn at scipy.org Thu May 24 13:12:01 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 24 May 2007 12:12:01 -0500 (CDT) Subject: [Scipy-svn] r3040 - in trunk/Lib/sandbox/timeseries: src tests Message-ID: <20070524171201.080FD39C100@new.scipy.org> Author: mattknox_ca Date: 2007-05-24 12:11:57 -0500 (Thu, 24 May 2007) New Revision: 3040 Modified: trunk/Lib/sandbox/timeseries/src/c_tdates.c trunk/Lib/sandbox/timeseries/tests/test_dates.py Log: - fixed a memory leak in DateObject_asfreq - fixed a problem with converting A-NOV freq to daily - raise error when frequency conversion fails instead of returning invalid result Modified: trunk/Lib/sandbox/timeseries/src/c_tdates.c =================================================================== --- trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-24 12:30:05 UTC (rev 3039) +++ trunk/Lib/sandbox/timeseries/src/c_tdates.c 2007-05-24 17:11:57 UTC (rev 3040) @@ -770,8 +770,11 @@ static long asfreq_AtoD(long fromDate, char relation, asfreq_info *af_info) { long absdate, year, final_adj; - int month = (af_info->from_a_year_end + 1) % 12; + int month = (af_info->from_a_year_end) % 12; + if (month == 0) { month = 1; } + else { month += 1; } + if (relation == 'B') { if (af_info->from_a_year_end == 12) {year = fromDate;} else {year = fromDate - 1;} @@ -1384,7 +1387,9 @@ &freq, &value, &string, &year, &month, &day, &quarter, &hour, &minute, &second, - &datetime)) return -1; + &datetime)) { + return -1; + } if (PyObject_HasAttrString(freq, "freq")) { PyObject *freq_attr = PyObject_GetAttrString(freq, "freq"); @@ -1610,8 +1615,13 @@ strcmp(relation_uc, "B") == 0 || strcmp(relation_uc, "AFTER") == 0 || strcmp(relation_uc, "A") == 0) { - relation = relation_uc[0]; + if(relation_uc[0] == 'A') { relation = 'A'; } + else { relation = 'B'; } + } else { invalid_relation=1; } + + free(relation_uc); + } else { invalid_relation=1; } @@ -1631,6 +1641,8 @@ result_val = asfreq_func(self->value, relation, &af_info); + if (result_val == INT_ERR_CODE) return NULL; + result->freq = toFreq; result->value = result_val; Modified: trunk/Lib/sandbox/timeseries/tests/test_dates.py =================================================================== --- trunk/Lib/sandbox/timeseries/tests/test_dates.py 2007-05-24 12:30:05 UTC (rev 3039) +++ trunk/Lib/sandbox/timeseries/tests/test_dates.py 2007-05-24 17:11:57 UTC (rev 3040) @@ -260,6 +260,7 @@ date_AJAN = dWrap(Date(freq=C.FR_ANNJAN, year=2007)) date_AJUN = dWrap(Date(freq=C.FR_ANNJUN, year=2007)) + date_ANOV = dWrap(Date(freq=C.FR_ANNNOV, year=2007)) date_A_to_Q_before = dWrap(Date(freq='Q', year=2007, quarter=1)) date_A_to_Q_after = dWrap(Date(freq='Q', year=2007, quarter=4)) @@ -288,6 +289,8 @@ date_AJAN_to_D_before = dWrap(Date(freq='D', year=2006, month=2, day=1)) date_AJUN_to_D_after = dWrap(Date(freq='D', year=2007, month=6, day=30)) date_AJUN_to_D_before = dWrap(Date(freq='D', year=2006, month=7, day=1)) + date_ANOV_to_D_after = dWrap(Date(freq='D', year=2007, month=11, day=30)) + date_ANOV_to_D_before = dWrap(Date(freq='D', year=2006, month=12, day=1)) assert_func(date_A.asfreq('Q', "BEFORE"), date_A_to_Q_before) assert_func(date_A.asfreq('Q', "AFTER"), date_A_to_Q_after) @@ -312,6 
+315,9 @@ assert_func(date_AJUN.asfreq('D', "BEFORE"), date_AJUN_to_D_before) assert_func(date_AJUN.asfreq('D', "AFTER"), date_AJUN_to_D_after) + assert_func(date_ANOV.asfreq('D', "BEFORE"), date_ANOV_to_D_before) + assert_func(date_ANOV.asfreq('D', "AFTER"), date_ANOV_to_D_after) + def test_conv_quarterly(self): "frequency conversion tests: from Quarterly Frequency" From scipy-svn at scipy.org Fri May 25 11:27:43 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 25 May 2007 10:27:43 -0500 (CDT) Subject: [Scipy-svn] r3041 - trunk/Lib/sandbox/timeseries/plotlib Message-ID: <20070525152743.0487139C030@new.scipy.org> Author: mattknox_ca Date: 2007-05-25 10:27:39 -0500 (Fri, 25 May 2007) New Revision: 3041 Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py Log: added additional frequency support for plots. All frequencies <= daily are now supported (eg. the various annual and quarterly frequencies, weekly, etc) Modified: trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py =================================================================== --- trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-24 17:11:57 UTC (rev 3040) +++ trunk/Lib/sandbox/timeseries/plotlib/mpl_timeseries.py 2007-05-25 15:27:39 UTC (rev 3041) @@ -175,8 +175,13 @@ if freq == _c.FR_BUS: periodsperyear = 261 + periodspermonth = 19 elif freq == _c.FR_DAY: periodsperyear = 365 + periodspermonth = 28 + elif TS.get_freq_group(freq) == _c.FR_WK: + periodsperyear = 52 + periodspermonth = 3 else: raise ValueError("unexpected frequency") @@ -195,7 +200,7 @@ else: return label_flags[0] # Case 1. Less than a month - if span <= (periodsperyear//12 - 2): + if span <= periodspermonth: month_start = period_break(dates,'month') if aslocator: major = default[month_start] @@ -377,7 +382,7 @@ return dict([(d,f) for (d,f) in zip(dates[formatted],format[formatted])]) #............................................................................... def _quarterly_finder(vmin, vmax, freq, aslocator): - if freq != _c.FR_QTR: + if TS.get_freq_group(freq) != _c.FR_QTR: raise ValueError("unexpected frequency") periodsperyear = 4 (vmin, vmax) = (int(vmin), int(vmax)) @@ -394,20 +399,20 @@ minor = dates else: format[:] = 'Q%q' - format[year_start] = 'Q%q\n%Y' + format[year_start] = 'Q%q\n%F' if not has_level_label(year_start): if dates.size > 1: idx = 1 else: idx = 0 - format[idx] = 'Q%q\n%Y' + format[idx] = 'Q%q\n%F' #............................................ elif span <= 11 * periodsperyear: if aslocator: major = dates[year_start] minor = dates else: - format[year_start] = '%Y' + format[year_start] = '%F' #............................................ else: years = dates[year_start]//4 + 1 @@ -418,7 +423,7 @@ major = dates[major_idx] minor = dates[year_start[(years % min_anndef == 0)]] else: - format[major_idx] = '%Y' + format[major_idx] = '%F' #............................................ if aslocator: return minor, major @@ -466,11 +471,11 @@ #..... if fgroup == _c.FR_ANN: self.finder = _annual_finder - elif freq == _c.FR_QTR: + elif fgroup == _c.FR_QTR: self.finder = _quarterly_finder elif freq == _c.FR_MTH: self.finder = _monthly_finder - elif freq in (_c.FR_BUS, _c.FR_DAY): + elif freq in (_c.FR_BUS, _c.FR_DAY) or fgroup == _c.FR_WK: self.finder = _daily_finder def asminor(self): @@ -536,11 +541,11 @@ #..... 
if fgroup == _c.FR_ANN: self.finder = _annual_finder - elif freq == _c.FR_QTR: + elif fgroup == _c.FR_QTR: self.finder = _quarterly_finder elif freq == _c.FR_MTH: self.finder = _monthly_finder - elif freq in (_c.FR_BUS, _c.FR_DAY): + elif freq in (_c.FR_BUS, _c.FR_DAY) or fgroup == _c.FR_WK: self.finder = _daily_finder def asminor(self): From scipy-svn at scipy.org Fri May 25 12:12:04 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 25 May 2007 11:12:04 -0500 (CDT) Subject: [Scipy-svn] r3042 - trunk/Lib/sandbox/lobpcg/tests Message-ID: <20070525161204.2608B39C030@new.scipy.org> Author: rc Date: 2007-05-25 11:12:00 -0500 (Fri, 25 May 2007) New Revision: 3042 Added: trunk/Lib/sandbox/lobpcg/tests/large_scale.py Log: a new example Added: trunk/Lib/sandbox/lobpcg/tests/large_scale.py =================================================================== --- trunk/Lib/sandbox/lobpcg/tests/large_scale.py 2007-05-25 15:27:39 UTC (rev 3041) +++ trunk/Lib/sandbox/lobpcg/tests/large_scale.py 2007-05-25 16:12:00 UTC (rev 3042) @@ -0,0 +1,51 @@ +from scipy import * +from scipy.sandbox import lobpcg +from scipy.sparse import spdiags, speye +from pylab import loglog, show, xlabel, ylabel, title +set_printoptions(precision=8,linewidth=90) +import time + +def sakurai(n): + """ Example taken from + T. Sakurai, H. Tadano, Y. Inadomi and U. Nagashima + A moment-based method for large-scale generalized eigenvalue problems + Appl. Num. Anal. Comp. Math. Vol. 1 No. 2 (2004) """ + + A = speye( n, n ) + d0 = array(r_[5,6*ones(n-2),5]) + d1 = -4*ones(n) + d2 = ones(n) + B = spdiags([d2,d1,d0,d1,d2],[-2,-1,0,1,2],n,n) + + k = arange(1,n+1) + w_ex = sort(1./(16.*pow(cos(0.5*k*pi/(n+1)),4))) # exact eigenvalues + + return A,B, w_ex + +m = 3 # Blocksize + +# +# Large scale +# +n = 25000 +A,B, w_ex = sakurai(n) # Mikota pair +X = rand(n,m) +data=[] +tt = time.clock() +eigs,vecs, resnh = lobpcg.lobpcg(X,A,B, + residualTolerance = 1e-6, maxIterations =500, retResidualNormsHistory=1) +data.append(time.clock()-tt) +print 'Results by LOBPCG for n='+str(n) +print +print eigs +print +print 'Exact eigenvalues' +print +print w_ex[:m] +print +print 'Elapsed time',data[0] +loglog(arange(1,n+1),w_ex,'b.') +xlabel(r'Number $i$') +ylabel(r'$\lambda_i$') +title('Eigenvalue distribution') +show() From scipy-svn at scipy.org Fri May 25 14:31:41 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 25 May 2007 13:31:41 -0500 (CDT) Subject: [Scipy-svn] r3043 - in trunk/Lib/special: cephes tests Message-ID: <20070525183141.CAC6539C1AA@new.scipy.org> Author: cookedm Date: 2007-05-25 13:31:38 -0500 (Fri, 25 May 2007) New Revision: 3043 Modified: trunk/Lib/special/cephes/hyp2f1.c trunk/Lib/special/tests/test_basic.py Log: #424: hyp2f1 gives wrong value for specific values. Plug a singularity in a transformation by averaging over it Modified: trunk/Lib/special/cephes/hyp2f1.c =================================================================== --- trunk/Lib/special/cephes/hyp2f1.c 2007-05-25 16:12:00 UTC (rev 3042) +++ trunk/Lib/special/cephes/hyp2f1.c 2007-05-25 18:31:38 UTC (rev 3043) @@ -181,8 +181,10 @@ double t1; t1 = fabs(b - a); if (fabs(t1 - round(t1)) < EPS) { - /* this transformation has a pole for b-a= +-integer */ - goto hypdiv; + /* this transformation has a pole for b-a= +-integer, + so we average around it. 
+ */ + return 0.5*(hyp2f1(a, b*(1+1e-9), c, x) + hyp2f1(a, b*(1-1e-9), c, x)); } p = hyp2f1(a, 1-c+a, 1-b+a, 1.0/x); q = hyp2f1(b, 1-c+b, 1-a+b, 1.0/x); Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-25 16:12:00 UTC (rev 3042) +++ trunk/Lib/special/tests/test_basic.py 2007-05-25 18:31:38 UTC (rev 3043) @@ -1171,6 +1171,9 @@ gamma(1+5-2)/gamma(1+0.5*5-2)/gamma(0.5+0.5*5)], [4, 0.5+4, 1.5-2*4, -1./3, (8./9)**(-2*4)*gamma(4./3)* gamma(1.5-2*4)/gamma(3./2)/gamma(4./3-2*4)], + # and some others + # ticket #424 + [1.5, -0.5, 1.0, -10.0, 4.1300097765277476484], ] for i, (a, b, c, x, v) in enumerate(values): cv = hyp2f1(a, b, c, x) From scipy-svn at scipy.org Fri May 25 20:59:09 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Fri, 25 May 2007 19:59:09 -0500 (CDT) Subject: [Scipy-svn] r3044 - in trunk/Lib/interpolate: . fitpack Message-ID: <20070526005909.0C56139C04F@new.scipy.org> Author: oliphant Date: 2007-05-25 19:58:59 -0500 (Fri, 25 May 2007) New Revision: 3044 Modified: trunk/Lib/interpolate/__fitpack.h trunk/Lib/interpolate/_fitpackmodule.c trunk/Lib/interpolate/fitpack/fpader.f trunk/Lib/interpolate/fitpack/fpbspl.f trunk/Lib/interpolate/fitpack/splev.f trunk/Lib/interpolate/interpolate.py Log: Fix interpolate so that it uses deBoor's algorithm like fitpack but with extra parameters as developed. Added a C-implementation of deBoor's algorithm for computing B-spline values. Modified: trunk/Lib/interpolate/__fitpack.h =================================================================== --- trunk/Lib/interpolate/__fitpack.h 2007-05-25 18:31:38 UTC (rev 3043) +++ trunk/Lib/interpolate/__fitpack.h 2007-05-26 00:58:59 UTC (rev 3044) @@ -588,5 +588,319 @@ } +static void +_deBoor(double *t, double x, int k, int ell, double *result) { + /* On completion the result array stores + the k+1 non-zero values of beta_i,k(x): for i=ell, ell-1, ell-2, ell-k. + Where t[ell] <= x < t[ell+1]. + */ + /* Implements the recursive algorithm of deBoor and Cox + Equivalent to what is done in fpbspl.f in fitpack except + their is no upper bound on the order of the spline. + */ + double *hh = result + k + 1; + double *h = result; + double xb, xa, w; + int ind, j, m; + + result[0] = 1.0; + for (j=1; j<=k; j++) { + memcpy(hh, h, j*sizeof(double)); + h[0] = 0.0; + for (m=1; m<=j; m++) { + ind = ell + m; + xb = t[ind]; + xa = t[ind-j]; + if (xb == xa) { + h[m] = 0.0; + continue; + } + w = hh[m-1]/(xb-xa); + h[m-1] += w*(xb-x); + h[m] = w*(x-xa); + } + } +} + +/* Given a set of (N+1) samples: A default set of knots is constructed + using the samples xk plus 2*(K-1) additional knots where + K = max(order,1) and the knots are chosen so that distances + are symmetric around the first and last samples: x_0 and x_N. + + There should be a vector of N+K coefficients for the spline + curve in coef. These coefficients form the curve as + + s(x) = sum(c_j B_{j,K}(x), j=-K..N-1) + + The spline function is evaluated at all points xx. 
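The hyp2f1 change above (averaging across the pole at integer b - a) can be sanity-checked against the ticket #424 reference value that the same commit adds to test_basic.py; a two-line check, assuming a scipy built from this revision or later:

    from scipy.special import hyp2f1

    # b - a = -2 is an integer, which used to fall into the 'goto hypdiv' branch
    print abs( hyp2f1( 1.5, -0.5, 1.0, -10.0 ) - 4.1300097765277476484 ) < 1e-6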
+ The approximation interval is from xk[0] to xk[-1] + Any xx outside that interval is set automatically to 0.0 + */ +static char doc_bspleval[] = "y = _bspleval(xx,xk,coef,order)"; +static PyObject *_bspleval(PyObject *dummy, PyObject *args) { + int k,kk,N,i,ell; + PyObject *xx_py=NULL, *coef_py=NULL, *x_i_py=NULL; + PyArrayObject *xx=NULL, *coef=NULL, *x_i=NULL, *yy=NULL; + PyArrayIterObject *xx_iter; + double *t=NULL, *h=NULL, *ptr; + double x0, xN, xN1, arg, sp, cval; + if (!PyArg_ParseTuple(args, "OOOi", &xx_py, &x_i_py, &coef_py, &k)) + return NULL; + if (k < 0) { + PyErr_Format(PyExc_ValueError, "order (%d) must be >=0", k); + return NULL; + } + kk = k; + if (k==0) kk = 1; + x_i = (PyArrayObject *)PyArray_FROMANY(x_i_py, NPY_DOUBLE, 1, 1, NPY_ALIGNED); + coef = (PyArrayObject *)PyArray_FROMANY(coef_py, NPY_DOUBLE, 1, 1, NPY_ALIGNED); + xx = (PyArrayObject *)PyArray_FROMANY(xx_py, NPY_DOUBLE, 0, 0, NPY_ALIGNED); + if (x_i == NULL || coef == NULL || xx == NULL) goto fail; + + N = PyArray_DIM(x_i,0)-1; + + if (PyArray_DIM(coef,0) < (N+kk)) { + PyErr_Format(PyExc_ValueError, "too few coefficients (have %d need at least %d)", + PyArray_DIM(coef,0), N+kk); + goto fail; + } + + /* create output values */ + yy = (PyArrayObject *)PyArray_EMPTY(xx->nd, xx->dimensions, NPY_DOUBLE, 0); + if (yy == NULL) goto fail; + /* create dummy knot array with new knots inserted at the end + selected as mirror symmetric versions of the old knots + */ + t = (double *)malloc(sizeof(double)*(N+2*kk-1)); + if (t==NULL) { + PyErr_NoMemory(); + goto fail; + } + x0 = *((double *)PyArray_DATA(x_i)); + xN = *((double *)PyArray_DATA(x_i) + N); + for (i=0; i 1*/ + t[i] = 2*x0 - *((double *)(PyArray_GETPTR1(x_i,kk-1-i))); + t[kk+N+i] = 2*xN - *((double *)(PyArray_GETPTR1(x_i,N-1-i))); + } + ptr = t + (kk-1); + for (i=0; i<=N; i++) { + *ptr++ = *((double *)(PyArray_GETPTR1(x_i, i))); + } + + /* Create work array to hold computed non-zero values for + the spline for a value of x. + */ + h = (double *)malloc(sizeof(double)*(2*kk+1)); + if (h==NULL) { + PyErr_NoMemory(); + goto fail; + } + + /* Determine the spline for each value of x */ + xx_iter = (PyArrayIterObject *)PyArray_IterNew((PyObject *)xx); + if (xx_iter == NULL) goto fail; + ptr = PyArray_DATA(yy); + while(PyArray_ITER_NOTDONE(xx_iter)) { + arg = *((double *)PyArray_ITER_DATA(xx_iter)); + if ((arg < x0) || (arg > xN)) { + /* If we are outside the interpolation region, + fill with zeros + */ + *ptr++ = 0.0; + } + else { + /* Find the interval that arg lies between in the set of knots + t[ell] <= arg < t[ell+1] (last-knot use the previous interval) */ + xN1 = *((double *)PyArray_DATA(x_i) + N-1); + if (arg >= xN1) { + ell = N + kk - 2; + } + else { + ell = kk-1; + while ((arg > t[ell])) ell++; + ell -= 1; + } + + _deBoor(t, arg, k, ell, h); + + sp = 0.0; + for (i=0; i<=k; i++) { + cval = *((double *)(PyArray_GETPTR1(coef, ell-i+1))); + sp += cval*h[k-i]; + } + *ptr++ = sp; + } + PyArray_ITER_NEXT(xx_iter); + } + Py_DECREF(xx_iter); + Py_DECREF(x_i); + Py_DECREF(coef); + Py_DECREF(xx); + free(t); + free(h); + return PyArray_Return(yy); + + fail: + Py_XDECREF(xx); + Py_XDECREF(coef); + Py_XDECREF(x_i); + Py_XDECREF(yy); + if (t != NULL) free(t); + if (h != NULL) free(h); + return NULL; +} + + +/* Given a set of (N+1) sample positions: + Construct the diagonals of the (N+1) x (N+K) matrix that is needed to find + the coefficients of a spline fit of order K. + Note that K>=2 because for K=0,1, the coefficients are just the + sample values themselves. 
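The _deBoor() routine added earlier in this header is the core of the new evaluator; the in-place update is compact but easy to misread. Below is a rough NumPy transcription of the same recursion, for illustration only (the function name is mine and no attempt is made to match the C calling convention):

    import numpy as np

    def deboor_nonzero_bsplines( t, x, k, ell ):
        # Returns the k+1 values B_{ell-k,k}(x), ..., B_{ell,k}(x),
        # assuming the knots satisfy t[ell] <= x < t[ell+1].
        h = np.zeros( k + 1 )
        h[0] = 1.0
        for j in range( 1, k + 1 ):
            hh = h[:j].copy()
            h[0] = 0.0
            for m in range( 1, j + 1 ):
                xb = t[ell + m]
                xa = t[ell + m - j]
                if xb == xa:
                    h[m] = 0.0
                    continue
                w = hh[m - 1] / ( xb - xa )
                h[m - 1] += w * ( xb - x )
                h[m] = w * ( x - xa )
        return h

    # on uniform knots the k+1 non-zero values at an interior point sum to one
    t = np.arange( 10, dtype = float )
    print deboor_nonzero_bsplines( t, 4.3, 3, 4 ).sum()   # ~1.0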
+ + The equation that expresses the constraints is + + s(x_i) = sum(c_j B_{j,K}(x_i), j=-K..N-1) = w_i for i=0..N + + This is equivalent to + + w = B*c where c.T = [c_{-K}, c{-K+1}, ..., c_{N-1}] and + w.T = [w_{0}, w_{1}, ..., w_{N}] + + Therefore B is an (N+1) times (N+K) matrix with entries + + B_{j,K}(x_i) for column j=-K..N-1 + and row i=0..N + + This routine takes the N+1 sample positions and the order k and + constructs the banded constraint matrix B (with k+1 non-zero diagonals) + + The returned array is (N+1) times (N+K) ready to be either used + to compute a minimally Kth-order derivative discontinuous spline + or to be expanded with an additional K-1 constraints to be used in + an exact reconstruction approach. + */ +static char doc_bsplmat[] = "B = _bsplmat(order,xk)\n" +"Construct the constraint matrix for spline fitting of order k\n" +"given sample positions in xk.\n" +"\n" +"If xk is an integer (N+1), then the result is equivalent to\n" +"xk=arange(N+1)+x0 for any value of x0. This produces the\n" +"integer-spaced, or cardinal spline matrix a bit faster."; +static PyObject *_bsplmat(PyObject *dummy, PyObject *args) { + int k,N,i,numbytes,j, equal; + int dims[2]; + PyObject *x_i_py=NULL; + PyArrayObject *x_i=NULL, *BB=NULL; + double *t=NULL, *h=NULL, *ptr; + double x0, xN, arg; + if (!PyArg_ParseTuple(args, "iO", &k, &x_i_py)) + return NULL; + if (k < 2) { + PyErr_Format(PyExc_ValueError, "order (%d) must be >=2", k); + return NULL; + } + + equal = 0; + N = PySequence_Length(x_i_py); + if (N == -1 && PyErr_Occurred()) { + PyErr_Clear(); + N = PyInt_AsLong(x_i_py); + if (N==-1 && PyErr_Occurred()) goto fail; + equal = 1; + } + N -= 1; + + /* create output matrix */ + dims[0] = N+1; + dims[1] = N+k; + BB = (PyArrayObject *)PyArray_ZEROS(2, dims, NPY_DOUBLE, 0); + if (BB == NULL) goto fail; + + t = (double *)malloc(sizeof(double)*(N+2*k-1)); + if (t==NULL) { + PyErr_NoMemory(); + goto fail; + } + + /* Create work array to hold computed non-zero values for + the spline for a value of x. + */ + h = (double *)malloc(sizeof(double)*(2*k+1)); + if (h==NULL) { + PyErr_NoMemory(); + goto fail; + } + + numbytes = k*sizeof(double); + + if (equal) { /* points equally spaced by 1 */ + /* we run deBoor's algorithm one time with artificially created knots + Then, we keep copying the result to every row */ + + /* Create knots at equally-spaced locations from -(K-1) to N+K-1 */ + ptr = t; + for (i=-k+1; i 1*/ + t[i] = 2*x0 - *((double *)(PyArray_GETPTR1(x_i,k-1-i))); + t[k+N+i] = 2*xN - *((double *)(PyArray_GETPTR1(x_i,N-1-i))); + } + ptr = t + (k-1); + for (i=0; i<=N; i++) { + *ptr++ = *((double *)(PyArray_GETPTR1(x_i, i))); + } + + + /* Determine the K+1 non-zero values of the spline and place them in the + correct location in the matrix for each row (along the diagonals). + In fact, the last member is always zero so only K non-zero values + are present. 
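The constraint matrix described above is exposed to Python as _fitpack._bsplmat (its docstring is in the same hunk). A quick shape check, purely illustrative, assuming the private module path used by interpolate.py at this revision:

    import numpy as np
    from scipy.interpolate import _fitpack   # private extension module

    order = 3
    xk = np.linspace( 0.0, 1.0, 6 )           # N+1 = 6 sample positions
    B = _fitpack._bsplmat( order, xk )
    print B.shape                             # (N+1, N+order) -> (6, 8)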
+ */ + ptr = PyArray_DATA(BB); + for (i=0,j=k-1; i 5: + raise ValueError("order must be between 0 and 5 inclusive") if kind == 'smoothest': func = eval('_find_smoothest%d' % order) From scipy-svn at scipy.org Sat May 26 22:04:45 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 26 May 2007 21:04:45 -0500 (CDT) Subject: [Scipy-svn] r3045 - trunk/Lib/interpolate Message-ID: <20070527020445.C912B39C020@new.scipy.org> Author: oliphant Date: 2007-05-26 21:04:41 -0500 (Sat, 26 May 2007) New Revision: 3045 Modified: trunk/Lib/interpolate/__fitpack.h trunk/Lib/interpolate/_fitpackmodule.c Log: Add calculation of full derivative so that smoothest interpolant of every order can be found. Modified: trunk/Lib/interpolate/__fitpack.h =================================================================== --- trunk/Lib/interpolate/__fitpack.h 2007-05-26 00:58:59 UTC (rev 3044) +++ trunk/Lib/interpolate/__fitpack.h 2007-05-27 02:04:41 UTC (rev 3045) @@ -624,6 +624,42 @@ } +static void +_deBoor_kth_derivative(double *t, int k, int ell, double *result) { + /* On completion the result array stores + the k+1 non-zero values of beta^{(k)}_{i,k}(x): for i=ell, ell-1, ell-2, ell-k. + Where t[ell] <= x < t[ell+1]. + */ + /* Implements a modified recursive algorithm similar to the one + to compute the value of the B-spline. But, modified to compute + the last derivative + */ + + double *hh = result + k + 1; + double *h = result; + double xb, xa, w; + int ind, j, m; + + result[0] = 1.0; + for (j=1; j<=k; j++) { + memcpy(hh, h, j*sizeof(double)); + h[0] = 0.0; + for (m=1; m<=j; m++) { + ind = ell + m; + xb = t[ind]; + xa = t[ind-j]; + if (xb == xa) { + h[m] = 0.0; + continue; + } + w = j*hh[m-1]/(xb-xa); + h[m-1] -= w; + h[m] = w; + } + } +} + + /* Given a set of (N+1) samples: A default set of knots is constructed using the samples xk plus 2*(K-1) additional knots where K = max(order,1) and the knots are chosen so that distances @@ -774,12 +810,12 @@ and row i=0..N This routine takes the N+1 sample positions and the order k and - constructs the banded constraint matrix B (with k+1 non-zero diagonals) + constructs the banded constraint matrix B (with k non-zero diagonals) The returned array is (N+1) times (N+K) ready to be either used to compute a minimally Kth-order derivative discontinuous spline or to be expanded with an additional K-1 constraints to be used in - an exact reconstruction approach. + an exact spline specification. */ static char doc_bsplmat[] = "B = _bsplmat(order,xk)\n" "Construct the constraint matrix for spline fitting of order k\n" @@ -904,3 +940,186 @@ +/* Given a set of (N+1) sample positions: + Construct the (N-1) x (N+K) error matrix J_{ij} such that + + for i=1..N-1, + + e_i = sum(J_{ij}c_{j},j=-K..N-1) + + is the discontinuity of the Kth derivative at the point i in the spline. + + This routine takes the N+1 sample positions and the order k and + constructs the banded matrix J + + The returned array is (N+1) times (N+K) ready to be either used + to compute a minimally Kth-order derivative discontinuous spline + or to be expanded with an additional K-1 constraints to be used in + an exact reconstruction approach. + */ +static char doc_bspldismat[] = "B = _bspldismat(order,xk)\n" +"Construct the kth derivative discontinuity jump constraint matrix \n" +"for spline fitting of order k given sample positions in xk.\n" +"\n" +"If xk is an integer (N+1), then the result is equivalent to\n" +"xk=arange(N+1)+x0 for any value of x0. 
This produces the\n" +"integer-spaced matrix a bit faster. If xk is a 2-tuple (N+1,dx)\n" +"then it produces the result as if the sample distance were dx"; +static PyObject *_bspldismat(PyObject *dummy, PyObject *args) { + int k,N,i,j, equal, m; + int dims[2]; + PyObject *x_i_py=NULL; + PyArrayObject *x_i=NULL, *BB=NULL; + double *t=NULL, *h=NULL, *ptr, *dptr; + double x0, xN, dx; + if (!PyArg_ParseTuple(args, "iO", &k, &x_i_py)) + return NULL; + if (k < 2) { + PyErr_Format(PyExc_ValueError, "order (%d) must be >=2", k); + return NULL; + } + + equal = 0; + N = PySequence_Length(x_i_py); + if (N==2 || (N == -1 && PyErr_Occurred())) { + PyErr_Clear(); + if (PyTuple_Check(x_i_py)) { + /* x_i_py = (N+1, dx) */ + N = PyInt_AsLong(PyTuple_GET_ITEM(x_i_py, 0)); + dx = PyFloat_AsDouble(PyTuple_GET_ITEM(x_i_py, 1)); + } + else { + N = PyInt_AsLong(x_i_py); + if (N==-1 && PyErr_Occurred()) goto fail; + dx = 1.0; + } + equal = 1; + } + N -= 1; + + if (N < 2) { + PyErr_Format(PyExc_ValueError, "too few samples (%d)", N); + return NULL; + } + /* create output matrix */ + dims[0] = N-1; + dims[1] = N+k; + BB = (PyArrayObject *)PyArray_ZEROS(2, dims, NPY_DOUBLE, 0); + if (BB == NULL) goto fail; + + t = (double *)malloc(sizeof(double)*(N+2*k-1)); + if (t==NULL) { + PyErr_NoMemory(); + goto fail; + } + + /* Create work array to hold computed non-zero values for + the spline for a value of x. + */ + h = (double *)malloc(sizeof(double)*(2*k+1)); + if (h==NULL) { + PyErr_NoMemory(); + goto fail; + } + + if (equal) { /* points equally spaced by 1 */ + /* we run deBoor's full derivative algorithm twice, subtract the results + offset by one and then copy the result one time with artificially created knots + Then, we keep copying the result to every row */ + + /* Create knots at equally-spaced locations from -(K-1) to N+K-1 */ + double *tmp, factor; + int numbytes; + numbytes = (k+2)*sizeof(double); + tmp = malloc(numbytes); + if (tmp==NULL) { + PyErr_NoMemory(); + goto fail; + } + ptr = t; + for (i=-k+1; i 1*/ + t[i] = 2*x0 - *((double *)(PyArray_GETPTR1(x_i,k-1-i))); + t[k+N+i] = 2*xN - *((double *)(PyArray_GETPTR1(x_i,N-1-i))); + } + ptr = t + (k-1); + for (i=0; i<=N; i++) { + *ptr++ = *((double *)(PyArray_GETPTR1(x_i, i))); + } + + + /* Determine the K+1 non-zero values of the discontinuity jump matrix + and place them in the correct location in the matrix for each row + (along the diagonals). + + The matrix is + + J_{ij} = b^{(k)}_{j,k}(x^{+}_i) - b^{(k)}_{j,k}(x^{-}_i) + + */ + ptr = PyArray_DATA(BB); + dptr = ptr; + for (i=0,j=k-1; i0) { + for (m=0; m<=k; m++) *dptr++ += h[m]; + } + /* store location of last start position plus one.*/ + dptr = ptr - k; + ptr += N; /* advance to next row shifted over one */ + } + /* We need to finish the result for the last row. 
*/ + _deBoor_kth_derivative(t, k, j, h); + for (m=0; m<=k; m++) *dptr++ += h[m]; + + finish: + Py_XDECREF(x_i); + free(t); + free(h); + return (PyObject *)BB; + + fail: + Py_XDECREF(x_i); + Py_XDECREF(BB); + if (t != NULL) free(t); + if (h != NULL) free(h); + return NULL; +} + + + Modified: trunk/Lib/interpolate/_fitpackmodule.c =================================================================== --- trunk/Lib/interpolate/_fitpackmodule.c 2007-05-26 00:58:59 UTC (rev 3044) +++ trunk/Lib/interpolate/_fitpackmodule.c 2007-05-27 02:04:41 UTC (rev 3045) @@ -17,6 +17,7 @@ {"_insert", fitpack_insert, METH_VARARGS, doc_insert}, {"_bspleval", _bspleval, METH_VARARGS, doc_bspleval}, {"_bsplmat", _bsplmat, METH_VARARGS, doc_bsplmat}, +{"_bspldismat", _bspldismat, METH_VARARGS, doc_bspldismat}, {NULL, NULL, 0, NULL} }; PyMODINIT_FUNC init_fitpack(void) { From scipy-svn at scipy.org Sat May 26 23:00:16 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 26 May 2007 22:00:16 -0500 (CDT) Subject: [Scipy-svn] r3046 - in trunk/Lib/linalg: . tests Message-ID: <20070527030016.CAAE639C020@new.scipy.org> Author: wnbell Date: 2007-05-26 22:00:14 -0500 (Sat, 26 May 2007) New Revision: 3046 Modified: trunk/Lib/linalg/decomp.py trunk/Lib/linalg/tests/test_decomp.py Log: updated _datanotshared() to use __array_interface__ instead of __array_data__ added unittest to original problem resolves ticket #361 Modified: trunk/Lib/linalg/decomp.py =================================================================== --- trunk/Lib/linalg/decomp.py 2007-05-27 02:04:41 UTC (rev 3045) +++ trunk/Lib/linalg/decomp.py 2007-05-27 03:00:14 UTC (rev 3046) @@ -43,13 +43,19 @@ count += 1 return v + + def _datanotshared(a1,a): if a1 is a: - return 0 - if hasattr(a,"__array_data__"): - return a1.__array_data__[0] != a.__array_data__[0] - return 1 + return False + else: + #try comparing data pointers + try: + return a1.__array_interface__['data'][0] != a.__array_interface__['data'][0] + except: + return True + def _geneig(a1,b,left,right,overwrite_a,overwrite_b): b1 = asarray(b) overwrite_b = overwrite_b or _datanotshared(b1,b) Modified: trunk/Lib/linalg/tests/test_decomp.py =================================================================== --- trunk/Lib/linalg/tests/test_decomp.py 2007-05-27 02:04:41 UTC (rev 3045) +++ trunk/Lib/linalg/tests/test_decomp.py 2007-05-27 03:00:14 UTC (rev 3046) @@ -736,5 +736,25 @@ h1 = dot(transp(conj(q)),dot(a,q)) assert_array_almost_equal(h1,h) + + +class test_datanotshared(NumpyTestCase): + + def check_datanotshared(self): + from linalg.decomp import _datanotshared + + M = matrix([[0,1],[2,3]]) + A = asarray(M) + L = M.tolist() + M2 = M.copy() + + assert_equal(_datanotshared(M,M),False) + assert_equal(_datanotshared(M,A),False) + + assert_equal(_datanotshared(M,L),True) + assert_equal(_datanotshared(M,M2),True) + assert_equal(_datanotshared(A,M2),True) + + if __name__ == "__main__": NumpyTest().run() From scipy-svn at scipy.org Sat May 26 23:15:04 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 26 May 2007 22:15:04 -0500 (CDT) Subject: [Scipy-svn] r3047 - trunk/Lib/sparse Message-ID: <20070527031504.2EAD439C020@new.scipy.org> Author: wnbell Date: 2007-05-26 22:15:00 -0500 (Sat, 26 May 2007) New Revision: 3047 Modified: trunk/Lib/sparse/sparse.py Log: fixed small bug in rmatvec resolves ticket #359 Modified: trunk/Lib/sparse/sparse.py =================================================================== --- trunk/Lib/sparse/sparse.py 2007-05-27 03:00:14 UTC (rev 3046) 
+++ trunk/Lib/sparse/sparse.py 2007-05-27 03:15:00 UTC (rev 3047) @@ -951,7 +951,7 @@ return _cs_matrix._matvec(self, other, cscmux) def rmatvec(self, other, conjugate=True): - return _cs_matrix._rmatvec(self, other, shape[1], shape[0], cscmux, conjugate=conjugate) + return _cs_matrix._rmatvec(self, other, self.shape[1], self.shape[0], cscmux, conjugate=conjugate) def matmat(self, other): return _cs_matrix._matmat(self, other, cscmucsc) @@ -1301,7 +1301,7 @@ return _cs_matrix._matvec(self, other, csrmux) def rmatvec(self, other, conjugate=True): - return _cs_matrix._rmatvec(self, other, shape[0], shape[1], csrmux, conjugate=conjugate) + return _cs_matrix._rmatvec(self, other, self.shape[0], self.shape[1], csrmux, conjugate=conjugate) def matmat(self, other): return _cs_matrix._matmat(self, other, csrmucsr) From scipy-svn at scipy.org Sun May 27 00:19:27 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 26 May 2007 23:19:27 -0500 (CDT) Subject: [Scipy-svn] r3048 - in trunk/Lib/sparse: . tests Message-ID: <20070527041927.61BF539C02D@new.scipy.org> Author: wnbell Date: 2007-05-26 23:19:22 -0500 (Sat, 26 May 2007) New Revision: 3048 Modified: trunk/Lib/sparse/sparse.py trunk/Lib/sparse/tests/test_sparse.py Log: fixed lil_matrix and dok_matrix get/set item with negative indices added unittests for negative indices resolves ticket #347 Modified: trunk/Lib/sparse/sparse.py =================================================================== --- trunk/Lib/sparse/sparse.py 2007-05-27 03:15:00 UTC (rev 3047) +++ trunk/Lib/sparse/sparse.py 2007-05-27 04:19:22 UTC (rev 3048) @@ -859,6 +859,9 @@ raise ValueError, "nzmax must not be less than nnz" if (nnz>0) and (amax(self.indices[:nnz]) >= M): raise ValueError, "row values must be < M" + if (nnz>0) and (amin(self.indices[:nnz]) < 0): + raise ValueError, "row values must be >= 0" + if (self.indptr[-1] > len(self.indices)): raise ValueError, \ "Last value of index list should be less than "\ @@ -1232,7 +1235,9 @@ if (len(self.indptr) != M+1): raise ValueError, "index pointer should be of length #rows + 1" if (nnz>0) and (amax(self.indices[:nnz]) >= N): - raise ValueError, "column-values must be < N" + raise ValueError, "column values must be < N" + if (nnz>0) and (amin(self.indices[:nnz]) < 0): + raise ValueError, "column values must be >= 0" if (nnz > nzmax): raise ValueError, \ "last value of index list should be less than "\ @@ -1553,11 +1558,16 @@ except (ValueError, TypeError): raise TypeError, "index must be a pair of integers or slices" + # Bounds checking if isintlike(i): + if i < 0: + i += self.shape[0] if i < 0 or i >= self.shape[0]: raise IndexError, "index out of bounds" if isintlike(j): + if j < 0: + j += self.shape[1] if j < 0 or j >= self.shape[1]: raise IndexError, "index out of bounds" @@ -1644,8 +1654,14 @@ " sequences" i, j = key + # First deal with the case where both i and j are integers if isintlike(i) and isintlike(j): + if i < 0: + i += self.shape[0] + if j < 0: + j += self.shape[1] + if i < 0 or i >= self.shape[0] or j < 0 or j >= self.shape[1]: raise IndexError, "index out of bounds" if isintlike(value) and value == 0: @@ -2277,9 +2293,13 @@ def _get1(self, i, j): row = self.rows[i] data = self.data[i] - if j > self.shape[1]: - raise IndexError + if j < 0: + j += self.shape[1] + + if j < 0 or j > self.shape[1]: + raise IndexError,'column index out of bounds' + pos = bisect_left(row, j) if pos != len(data) and row[pos] == j: return data[pos] @@ -2314,7 +2334,7 @@ raise IndexError, "invalid index" if 
isscalar(i): - if isscalar(j): + if isscalar(j): return self._get1(i, j) if isinstance(j, slice): j = self._slicetoseq(j, self.shape[1]) @@ -2345,6 +2365,13 @@ def _insertat2(self, row, data, j, x): """ helper for __setitem__: insert a value in the given row/data at column j. """ + + if j < 0: #handle negative column indices + j += self.shape[1] + + if j < 0 or j >= self.shape[1]: + raise IndexError,'column index out of bounds' + pos = bisect_left(row, j) if x != 0: if pos == len(row): Modified: trunk/Lib/sparse/tests/test_sparse.py =================================================================== --- trunk/Lib/sparse/tests/test_sparse.py 2007-05-27 03:15:00 UTC (rev 3047) +++ trunk/Lib/sparse/tests/test_sparse.py 2007-05-27 04:19:22 UTC (rev 3048) @@ -84,7 +84,9 @@ a[1,2] = 4.0 a[0,1] = 3 a[2,0] = 2.0 - assert_array_equal(a.todense(),[[0,3,0,0],[0,0,4,0],[2,0,0,0]]) + a[0,-1] = 8 + a[-1,-2] = 7 + assert_array_equal(a.todense(),[[0,3,0,8],[0,0,4,0],[2,0,7,0]]) def check_add(self): a = self.datsp @@ -691,11 +693,7 @@ A[:,1] = A.copy() except: caught += 1 - try: - A[:,-1] = range(5) - except IndexError: - caught += 1 - assert caught == 6 + assert_equal(caught,5) class test_lil(_test_cs, _test_horiz_slicing, NumpyTestCase): From scipy-svn at scipy.org Sun May 27 00:49:44 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sat, 26 May 2007 23:49:44 -0500 (CDT) Subject: [Scipy-svn] r3049 - trunk/Lib/sparse Message-ID: <20070527044944.4A24E39C060@new.scipy.org> Author: wnbell Date: 2007-05-26 23:49:36 -0500 (Sat, 26 May 2007) New Revision: 3049 Modified: trunk/Lib/sparse/sparse.py Log: removed cmp function from spmatrix resolves ticket #290 Modified: trunk/Lib/sparse/sparse.py =================================================================== --- trunk/Lib/sparse/sparse.py 2007-05-27 04:19:22 UTC (rev 3048) +++ trunk/Lib/sparse/sparse.py 2007-05-27 04:49:36 UTC (rev 3049) @@ -162,9 +162,6 @@ val = val + self.listprint(0, nnz) return val[:-1] - def __cmp__(self, other): - raise NotImplementedError, "comparison of sparse matrices not implemented" - def __nonzero__(self): # Simple -- other ideas? return self.getnnz() > 0 From scipy-svn at scipy.org Sun May 27 06:58:28 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 27 May 2007 05:58:28 -0500 (CDT) Subject: [Scipy-svn] r3050 - trunk/Lib/interpolate Message-ID: <20070527105828.2CFF339C033@new.scipy.org> Author: oliphant Date: 2007-05-27 05:58:24 -0500 (Sun, 27 May 2007) New Revision: 3050 Modified: trunk/Lib/interpolate/__fitpack.h trunk/Lib/interpolate/interpolate.py Log: Fix up recently added interpolation so that it is more general and scales to higher orders by using the B-spline representation of a piece-wise polynomial curve. Implemented the deBoor algorithm multiple times. Modified: trunk/Lib/interpolate/__fitpack.h =================================================================== --- trunk/Lib/interpolate/__fitpack.h 2007-05-27 04:49:36 UTC (rev 3049) +++ trunk/Lib/interpolate/__fitpack.h 2007-05-27 10:58:24 UTC (rev 3050) @@ -589,72 +589,58 @@ static void -_deBoor(double *t, double x, int k, int ell, double *result) { +_deBoor_D(double *t, double x, int k, int ell, int m, double *result) { /* On completion the result array stores - the k+1 non-zero values of beta_i,k(x): for i=ell, ell-1, ell-2, ell-k. + the k+1 non-zero values of beta^(m)_i,k(x): for i=ell, ell-1, ell-2, ell-k. Where t[ell] <= x < t[ell+1]. 
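r3048 above makes negative row and column indices wrap around for dok_matrix and lil_matrix, matching dense NumPy behaviour; a tiny illustration, separate from the test-suite changes:

    from scipy.sparse import lil_matrix

    a = lil_matrix( (3, 4) )
    a[0, -1] = 8        # same entry as a[0, 3]
    a[-1, -2] = 7       # same entry as a[2, 2]
    print a[0, 3], a[2, 2]   # 8.0 7.0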
*/ - /* Implements the recursive algorithm of deBoor and Cox - Equivalent to what is done in fpbspl.f in fitpack except - their is no upper bound on the order of the spline. + /* Implements a recursive algorithm similar to the original algorithm of + deBoor. */ double *hh = result + k + 1; double *h = result; double xb, xa, w; - int ind, j, m; - + int ind, j, n; + + /* Perform k-m "standard" deBoor iterations */ + /* so that h contains the k+1 non-zero values of beta_{ell,k-m}(x) */ + /* needed to calculate the remaining derivatives. */ + result[0] = 1.0; - for (j=1; j<=k; j++) { + for (j=1; j<=k-m; j++) { memcpy(hh, h, j*sizeof(double)); h[0] = 0.0; - for (m=1; m<=j; m++) { - ind = ell + m; + for (n=1; n<=j; n++) { + ind = ell + n; xb = t[ind]; xa = t[ind-j]; if (xb == xa) { - h[m] = 0.0; + h[n] = 0.0; continue; } - w = hh[m-1]/(xb-xa); - h[m-1] += w*(xb-x); - h[m] = w*(x-xa); + w = hh[n-1]/(xb-xa); + h[n-1] += w*(xb-x); + h[n] = w*(x-xa); } } -} - -static void -_deBoor_kth_derivative(double *t, int k, int ell, double *result) { - /* On completion the result array stores - the k+1 non-zero values of beta^{(k)}_{i,k}(x): for i=ell, ell-1, ell-2, ell-k. - Where t[ell] <= x < t[ell+1]. - */ - /* Implements a modified recursive algorithm similar to the one - to compute the value of the B-spline. But, modified to compute - the last derivative - */ - - double *hh = result + k + 1; - double *h = result; - double xb, xa, w; - int ind, j, m; - - result[0] = 1.0; - for (j=1; j<=k; j++) { + /* Now do m "derivative" recursions */ + /* to convert the values of beta into the mth derivative */ + for (j=k-m+1; j<=k; j++) { memcpy(hh, h, j*sizeof(double)); h[0] = 0.0; - for (m=1; m<=j; m++) { - ind = ell + m; + for (n=1; n<=j; n++) { + ind = ell + n; xb = t[ind]; xa = t[ind-j]; if (xb == xa) { h[m] = 0.0; continue; } - w = j*hh[m-1]/(xb-xa); - h[m-1] -= w; - h[m] = w; + w = j*hh[n-1]/(xb-xa); + h[n-1] -= w; + h[n] = w; } } } @@ -674,22 +660,38 @@ The approximation interval is from xk[0] to xk[-1] Any xx outside that interval is set automatically to 0.0 */ -static char doc_bspleval[] = "y = _bspleval(xx,xk,coef,order)"; +static char doc_bspleval[] = "y = _bspleval(xx,xk,coef,k,{deriv (0)})\n" + "\n" + "The spline is defined by the approximation interval xk[0] to xk[-1],\n" + "the length of xk (N+1), the order of the spline, k, and \n" + "the number of coeficients N+k. The coefficients range from xk_{-K}\n" + "to xk_{N-1} inclusive and are all the coefficients needed to define\n" + "an arbitrary spline of order k, on the given approximation interval\n" + "\n" + "Extra knot points are internally added using knot-point symmetry \n" + "around xk[0] and xk[-1]"; + static PyObject *_bspleval(PyObject *dummy, PyObject *args) { - int k,kk,N,i,ell; + int k,kk,N,i,ell,dk,deriv=0; PyObject *xx_py=NULL, *coef_py=NULL, *x_i_py=NULL; PyArrayObject *xx=NULL, *coef=NULL, *x_i=NULL, *yy=NULL; PyArrayIterObject *xx_iter; double *t=NULL, *h=NULL, *ptr; double x0, xN, xN1, arg, sp, cval; - if (!PyArg_ParseTuple(args, "OOOi", &xx_py, &x_i_py, &coef_py, &k)) + if (!PyArg_ParseTuple(args, "OOOi|i", &xx_py, &x_i_py, &coef_py, &k, &deriv)) return NULL; if (k < 0) { PyErr_Format(PyExc_ValueError, "order (%d) must be >=0", k); return NULL; } + if (deriv > k) { + PyErr_Format(PyExc_ValueError, "derivative (%d) must be <= order (%d)", + deriv, k); + return NULL; + } kk = k; - if (k==0) kk = 1; + if (k==0) kk = 1; + dk = (k == 0 ? 
0 : 1); x_i = (PyArrayObject *)PyArray_FROMANY(x_i_py, NPY_DOUBLE, 1, 1, NPY_ALIGNED); coef = (PyArrayObject *)PyArray_FROMANY(coef_py, NPY_DOUBLE, 1, 1, NPY_ALIGNED); xx = (PyArrayObject *)PyArray_FROMANY(xx_py, NPY_DOUBLE, 0, 0, NPY_ALIGNED); @@ -697,9 +699,9 @@ N = PyArray_DIM(x_i,0)-1; - if (PyArray_DIM(coef,0) < (N+kk)) { + if (PyArray_DIM(coef,0) < (N+k)) { PyErr_Format(PyExc_ValueError, "too few coefficients (have %d need at least %d)", - PyArray_DIM(coef,0), N+kk); + PyArray_DIM(coef,0), N+k); goto fail; } @@ -738,6 +740,7 @@ xx_iter = (PyArrayIterObject *)PyArray_IterNew((PyObject *)xx); if (xx_iter == NULL) goto fail; ptr = PyArray_DATA(yy); + while(PyArray_ITER_NOTDONE(xx_iter)) { arg = *((double *)PyArray_ITER_DATA(xx_iter)); if ((arg < x0) || (arg > xN)) { @@ -756,14 +759,14 @@ else { ell = kk-1; while ((arg > t[ell])) ell++; - ell -= 1; + if (arg != t[ell]) ell--; } - _deBoor(t, arg, k, ell, h); - + _deBoor_D(t, arg, k, ell, deriv, h); + sp = 0.0; for (i=0; i<=k; i++) { - cval = *((double *)(PyArray_GETPTR1(coef, ell-i+1))); + cval = *((double *)(PyArray_GETPTR1(coef, ell-i+dk))); sp += cval*h[k-i]; } *ptr++ = sp; @@ -879,7 +882,7 @@ ptr = t; for (i=-k+1; i= self.a) & (xnew <= self.b) + res[~mask] = self.fill + xx = xnew.compress(mask) + indxs = np.searchsorted(self.breaks, xx)-1 indxs = indxs.clip(0,len(self.breaks)) pp = self.coeffs - V = np.vander(xnew,N=self.N) - # res = np.diag(np.dot(V,pp[:,indxs])) - res = array([np.dot(V[k,:],pp[:,indxs[k]]) for k in xrange(len(xnew))]) + diff = xx - self.breaks.take(indxs) + V = np.vander(diff,N=self.K) + # values = np.diag(dot(V,pp[:,indxs])) + values = array([dot(V[k,:],pp[:,indxs[k]]) for k in xrange(len(xx))]) + res[mask] = values + res.shape = saveshape return res + def fromspline(cls, xk, cvals, order, fill=0.0): + N = len(xk)-1 + sivals = np.empty((order+1,N), dtype=float) + for m in xrange(order,-1,-1): + fact = spec.gamma(m+1) + res = _fitpack._bspleval(xk[:-1], xk, cvals, order, m) + res /= fact + sivals[order-m,:] = res + return cls(sivals, xk, fill=fill) + fromspline = classmethod(fromspline) + + +def _find_smoothest(xk, yk, order): + # construct Bmatrix, and Jmatrix + # e = J*c + # minimize norm(e,2) given B*c=yk + N = len(xk)-1 + K = order + B = _fitpack._bsplmat(order, xk) + J = _fitpack._bspldismat(order, xk) + u,s,vh = np.dual.svd(B) + ind = K-1 + V2 = vh[-ind:,:].T + V1 = vh[:-ind,:].T + A = dot(J.T,J) + tmp = dot(V2.T,A) + Q = dot(tmp,V2) + p = np.dual.solve(Q,tmp) + tmp = dot(V2,p) + tmp = np.eye(N+K) - tmp + tmp = dot(tmp,V1) + tmp = dot(tmp,np.diag(1.0/s)) + tmp = dot(tmp,u.T) + return dot(tmp, yk) + + + def _setdiag(a, k, v): assert (a.ndim==2) M,N = a.shape @@ -396,19 +455,19 @@ _setdiag(J,0,idk[:-1]) _setdiag(J,1,-idk[1:]-idk[:-1]) _setdiag(J,2,idk[1:]) - A = np.dot(J.T,J) - val = np.dot(V2,np.dot(A,V2)) - res1 = np.dot(np.outer(V2,V2)/val,A) - mk = np.dot(np.eye(Np1)-res1,np.dot(Bd,b)) + A = dot(J.T,J) + val = dot(V2,dot(A,V2)) + res1 = dot(np.outer(V2,V2)/val,A) + mk = dot(np.eye(Np1)-res1,dot(Bd,b)) return mk def _calc_fromJBd(J, Bd, b, V2, NN): - A = np.dot(J.T,J) - sub = np.dot(V2.T,np.dot(A,V2)) + A = dot(J.T,J) + sub = dot(V2.T,dot(A,V2)) subi = np.linalg.inv(sub) - res0 = np.dot(V2,subi) - res1 = np.dot(res0,np.dot(V2.T,A)) - mk = np.dot(np.eye(NN)-res1,np.dot(Bd,b)) + res0 = dot(V2,subi) + res1 = dot(res0,dot(V2.T,A)) + mk = dot(np.eye(NN)-res1,dot(Bd,b)) return mk def _find_smoothest3(xk, yk): @@ -423,7 +482,7 @@ _setdiag(B,2,dk[1:]) u,s,vh = np.dual.svd(B) V2 = vh[-2:,:].T - Bd = 
np.dot(vh[:-2,:].T, np.dot(diag(1.0/s),u.T)) + Bd = dot(vh[:-2,:].T, dot(np.diag(1.0/s),u.T)) b0 = np.diff(yk)/dk b = 6*np.diff(b0) J = np.zeros((N-1,N+1)) @@ -433,12 +492,6 @@ _setdiag(J,2,idk[1:]) return _calc_fromJBd(J, Bd, b, V2, Np1) -def _find_smoothest4(xk, yk): - raise NotImplementedError - -def _find_smoothest5(xk, yk): - raise NotImplementedError - def _get_spline2_Bb(xk, yk, kind, conds): Np1 = len(xk) dk = xk[1:]-xk[:-1] @@ -566,87 +619,14 @@ else: raise ValueError, "%s not supported" % kind -def _sp0eval((mk,xk,yk),xnew): - indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) - return yk[indxs-1] -def _sp0topp(mk,xk,yk): - c0 = yk - return ppform(array([c0]),xk) +def splmake(xk,yk,order=3,kind='smoothest',conds=None): + """Return a (xk, cvals, k) representation of a spline given + data-points where the (internal) knots are at the data-points. -def _sp1eval((mk,xk,yk),xnew): - indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) - indxsm1 = indxs-1 - d = xnew - xk[indxs] - dk = (x[1:]-x[:-1])[indxsm1] - wk = yk[indxs] - wkm1 = yk[indxsm1] - res = (wk-wkm1)/dk - res *= d - res += wk - return res - -def _sp1topp(mk,xk,yk): - c1 = (yk[1:]-yk[:-1])/(xk[1:]-xk[:-1]) - c0 = yk[1:] - c1*xk[1:] - return ppform(array([c1,c0]), xk) - -def _sp3eval((mk,xk,yk),xnew): - """Evaluate a cubic-spline representation of the points (xk,yk) - at the new values xnew. The mk values are the second derivatives at xk - The xk vector must be sorted. - - More than one curve can be represented using 2-d arrays for mk and yk. - However, the last dimension must have the same shape as the 1-d array xk. - The first-dimension will be considered the interpolating dimension. - """ - indxs = np.searchsorted(xk, xnew) - indxs = indxs.clip(1,len(xk)) - indxsm1 = indxs-1 - xkm1 = xk[indxsm1] - xkvals = xk[indxs] - dm1 = xnew - xkm1 - d = xkvals - xnew - mk0 = mk[indxs] - mkm1 = mk[indxsm1] - dk = xkvals-xkm1 - val = (mk0*dm1**3. + mkm1*d**3.)/(6*dk) - val += (yk[indxsm1]/dk - mkm1*dk/6.)*d - val += (yk[indxs]/dk - mk0*dk/6.)*dm1 - return val - -def _sp3topp(mk,xk,yk): - """Return an N-d array providing the piece-wise polynomial form. - - mk - second derivative at the knots - xk - knot-points - yk - values of the curve at the knots - - The first 2 dimensions are the polynomial for a particular - curve. The first dimension provides the coefficients for the - polynomial and the second dimension provides the different pieces - """ - dk = xk[1:] - xk[:-1] - temp1 = mk[1:] - mk[:-1] - temp2 = mk[1:]*xk[:-1]-mk[:-1]*xk[1:] - c3 = temp1/(6*dk) - c2 = -temp2/(2*dk) - c1 = (mk[1:]*xk[:-1]**2 - mk[:-1]*xk[1:]**2)/(2*dk) - c1 -= temp1*dk/6. - c1 += (yk[1:]-yk[:-1])/dk - c0 = (mk[:-1]*xk[1:]**3 - mk[1:]*xk[:-1]**3)/(6*dk) - c0 += temp2*dk/6. - c0 += (yk[:-1]*xk[1:] - yk[1:]*xk[:-1])/dk - return ppform([c3,c2,c1,c0], xk) - - -def splmake(xk,yk,order=3,kind='not-a-knot',conds=None): - """Return an (mk,xk,yk,order) representation of a spline given - data-points - yk can be an N-d array to represent more than one curve, through the same xk points. The first dimension is assumed to be the - interpolating dimenion. + interpolating dimension. 
kind can be 'natural', 'second', 'first', 'clamped', 'endslope', 'periodic', 'symmetric', 'parabolic', 'not-a-knot', @@ -660,128 +640,70 @@ N = yk.shape[0]-1 order = int(order) - if order in [0,1]: - return order, xk, yk, order - if order < 2 or order > 5: - raise ValueError("order must be between 0 and 5 inclusive") + if order < 0: + raise ValueError("order must not be negative") + if order == 0: + return xk, yk[:-1], order + elif order == 1: + return xk, yk, order if kind == 'smoothest': - func = eval('_find_smoothest%d' % order) - mk = func(xk,yk) - return mk, xk, yk, order + coefs = _find_smoothest(xk,yk,order) + return xk, coefs, order - try: - func = eval('_get_spline%d_Bb'%order) - except NameError: - raise ValueError("order %d not available" % order) + raise NotImplementedError - B,b,exfunc,nlu = func(xk, yk, kind, conds) +## try: +## func = eval('_get_spline%d_Bb'%order) +## except NameError: +## raise ValueError("order %d not available" % order) - if nlu is None: - mk = np.dual.solve(B,b) - else: - mk = slin.solve_banded(nlu,B,b) - if exfunc is not None: - # need to add additional values to mk - # using the returned function - mk = exfunc(mk) +## B,b,exfunc,nlu = func(xk, yk, kind, conds) + +## if nlu is None: +## mk = np.dual.solve(B,b) +## else: +## mk = slin.solve_banded(nlu,B,b) + +## if exfunc is not None: +## # need to add additional values to mk +## # using the returned function +## mk = exfunc(mk) - return mk, xk, yk, order +## return mk, xk, yk, order -def spleval((mk,xk,yk,order),xnew): - """Evaluate a spline represented by a tuple at the new x-values. +def spleval((xk,cvals,k),xnew,deriv=0): + """Evaluate a fixed spline represented by the given tuple at the new x-values. + The xk values are the interior knot points. The approximation region is + xk[0] to xk[-1]. If N+1 is the length of xk, then cvals should be N+k where + k is the order of the spline. + + Internally, an additional max(k-1,0) knot points are added on either + side of the spline. + + If cvals represents more than one curve and/or xnew is N-d, then the result is + xnew.shape + cvals.shape[1:] providing the interpolation of multiple curves. """ - func = eval('_sp%deval'%order) - return func((mk,xk,yk),xnew) + oldshape = np.shape(xnew) + xx = np.ravel(xnew) + sh = cvals.shape[1:] + res = np.empty(xx.shape + sh) + for index in np.ndindex(*sh): + sl = (slice(None),)+index + res[sl] = _fitpack._bspleval(xx,xk,cvals[sl],k,deriv) + res.shape = oldshape + sh + return res + -def spltopp(mk,xk,yk,order=3): - """Return a piece-wise polynomial object from a spline tuple. +def spltopp(xk,cvals,k): + """Return a piece-wise polynomial object from a fixed-spline tuple. """ - return eval('_sp%dtopp'%order)(mk,xk,yk) + return ppform.fromspline(xk, cvals, k) -def spline(xk,yk,xnew,order=3,kind='not-a-knot',conds=None): +def spline(xk,yk,xnew,order=3,kind='smoothest',conds=None): """Interpolate a curve (xk,yk) at points xnew using a spline fit. """ - func = eval('_sp%deval'%order) - return func(splmake(xk,yk,order=order,kind=kind,conds=conds)[:-1],xnew) + return spleval(splmake(xk,yk,order=order,kind=kind,conds=conds),xnew) -def _sp2topp(zk,xk,yk): - dk = xk[1:]-xk[:-1] - c2 = (zk[1:]-zk[:-1])/(2*dk) - c1 = (xk[1:]*zk[:-1]-xk[:-1]*zk[1:])/dk - c0 = (zk[1:]*xk[:-1]**2 - zk[:-1]*xk[1:]**2)/(2*dk) - c0 += yk[1:]- zk[1:]*dk/2. 
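The new public helpers defined above fit together as splmake -> spleval, with spline() as the one-shot wrapper; a short sketch (not part of the commit), assuming the module is importable as scipy.interpolate.interpolate at this revision:

    import numpy as np
    from scipy.interpolate import interpolate as ip

    xk = np.linspace( 0.0, 2 * np.pi, 11 )
    yk = np.sin( xk )
    xnew = np.linspace( 0.0, 2 * np.pi, 101 )

    # one-shot wrapper
    ynew = ip.spline( xk, yk, xnew, order = 3, kind = 'smoothest' )

    # or keep the (xk, cvals, k) representation around and reuse it
    rep = ip.splmake( xk, yk, order = 3 )
    ynew2 = ip.spleval( rep, xnew )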
- return ppform([c2,c1,c0],xk) - -def _sp2eval((zk,xk,yk),xnew): - indxs = np.searchsorted(xk, xnew) - indxs = indxs.clip(1,len(xk)) - indxsm1 = indxs-1 - dk = (xk[1:]-xk[:-1])[indxsm1] - d = xnew - xk[indxs] - zk0 = zk[indxs] - res = (zk0-zk[indxsm1])/(2*dk) - res *= d - res += zk0 - res *= d - res += yk[indxs] - return res - -def _sp4topp(mk,xk,yk): - raise NotImplementedError - -def _sp4eval((mk,xk,yk),xnew): - nk = mk[1::2] # second-derivatives - mk = mk[::2] # third-derivatives - indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) - indxsm1 = indxs-1 - dk = (xk[1:]-xk[:-1])[indxsm1] - d = xnew - xk[indxs] - nk0 = nk[indxs] - nkm1 = nk[indxsm1] - mk0 = mk[indxs] - wk = yk[indxs] - wkm1 = yk[indxsm1] - res = (nk0-nkm1)/(24*dk)*d - res += nk0/6. - res *= d - res += mk0/2. - res *= d - res += mk0*dk/2. + (wk-wkm1)/dk - (3*nk0+nkm1)*(dk**2)/24. - res *= d - res += wk - return res - -def _sp5topp(mk,xk,yk): - raise NotImplementedError - -def _sp5eval((mk,xk,yk),xnew): - mk = mk[::3] - nk = mk[1::3] - ok = mk[2::3] - indxs = np.searchsorted(xk, xnew).clip(1,len(xk)) - indxsm1 = indxs-1 - dk = (xk[1:]-xk[:-1])[indxsm1] - d = xnew - xk[indxs] - ok0 = ok[indxs] - okm1 = ok[indxsm1] - nk0 = nk[indxs] - mk0 = mk[indxs] - wk = yk[indxs] - wkm1 = yk[indxsm1] - res = (ok0-ok1)/(120*dk) - res *= d - res += ok0/24. - res *= d - res += nk0/6. - res *= d - res += mk0/2. - res *= d - res += (4*ok0+okm1)*(dk**3)/120. - nk0*(dk**2)/6. - res += mk0*dk/2. + (wk-wkm1)/dk - res *= d - res += wk - return res - From scipy-svn at scipy.org Sun May 27 21:32:49 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 27 May 2007 20:32:49 -0500 (CDT) Subject: [Scipy-svn] r3051 - in trunk/Lib/sandbox/pyem: . doc doc/examples Message-ID: <20070528013249.F279E39C0A0@new.scipy.org> Author: cdavid Date: 2007-05-27 20:32:34 -0500 (Sun, 27 May 2007) New Revision: 3051 Added: trunk/Lib/sandbox/pyem/doc/ trunk/Lib/sandbox/pyem/doc/Bic_example.png trunk/Lib/sandbox/pyem/doc/Makefile trunk/Lib/sandbox/pyem/doc/base.tex trunk/Lib/sandbox/pyem/doc/example1.png trunk/Lib/sandbox/pyem/doc/examples/ trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py trunk/Lib/sandbox/pyem/doc/examples/examples.py trunk/Lib/sandbox/pyem/doc/index.txt trunk/Lib/sandbox/pyem/doc/tutorial.pdf trunk/Lib/sandbox/pyem/doc/user.tex Removed: trunk/Lib/sandbox/pyem/basic_example1.py trunk/Lib/sandbox/pyem/basic_example2.py trunk/Lib/sandbox/pyem/basic_example3.py trunk/Lib/sandbox/pyem/examples.py Log: Move documentation files in doc repository. * all examples lie in doc/examples. * A Makefile to generate the pdf from the rest files and python examples is now included. Deleted: trunk/Lib/sandbox/pyem/basic_example1.py =================================================================== --- trunk/Lib/sandbox/pyem/basic_example1.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/basic_example1.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -1,48 +0,0 @@ -import numpy as N -import pylab as P -from scipy.sandbox.pyem import GM - -#------------------------------ -# Hyper parameters: -# - K: number of clusters -# - d: dimension -k = 3 -d = 2 - -#------------------------------------------------------- -# Values for weights, mean and (diagonal) variances -# - the weights are an array of rank 1 -# - mean is expected to be rank 2 with one row for one component -# - variances are also expteced to be rank 2. 
For diagonal, one row -# is one diagonal, for full, the first d rows are the first variance, -# etc... In this case, the variance matrix should be k*d rows and d -# colums -w = N.array([0.2, 0.45, 0.35]) -mu = N.array([[4.1, 3], [1, 5], [-2, -3]]) -va = N.array([[1, 1.5], [3, 4], [2, 3.5]]) - -#----------------------------------------- -# First method: directly from parameters: -# Both methods are equivalents. -gm = GM.fromvalues(w, mu, va) - -#------------------------------------- -# Second method to build a GM instance: -gm = GM(d, k, mode = 'diag') -# The set_params checks that w, mu, and va corresponds to k, d and m -gm.set_param(w, mu, va) - -# Once set_params is called, both methods are equivalent. The 2d -# method is useful when using a GM object for learning (where -# the learner class will set the params), whereas the first one -# is useful when there is a need to quickly sample a model -# from existing values, without a need to give the hyper parameters - -# Create a Gaussian Mixture from the parameters, and sample -# 1000 items from it (one row = one 2 dimension sample) -data = gm.sample(1000) - -# Plot the samples -P.plot(data[:, 0], data[:, 1], '.') -# Plot the ellipsoids of confidence with a level a 75 % -gm.plot(level = 0.75) Deleted: trunk/Lib/sandbox/pyem/basic_example2.py =================================================================== --- trunk/Lib/sandbox/pyem/basic_example2.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/basic_example2.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -1,45 +0,0 @@ -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -# To reproduce results, fix the random seed -seed(1) - -#+++++++++++++++++++++++++++++ -# Meta parameters of the model -# - k: Number of components -# - d: dimension of each Gaussian -# - mode: Mode of covariance matrix: full or diag (string) -# - nframes: number of frames (frame = one data point = one -# row of d elements) -k = 2 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.5) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -# Create a Model from a Gaussian mixture with kmean initialization -lgm = GM(d, k, mode) -gmm = GMM(lgm, 'kmean') - -# The actual EM, with likelihood computation. The threshold -# is compared to the (linearly appromixated) derivative of the likelihood -em = EM() -like = em.train(data, gmm, maxiter = 30, thresh = 1e-8) - -# The computed parameters are in gmm.gm, which is the same than lgm -# (remember, python does not copy most objects by default). 
You can for example -# plot lgm against gm to compare Deleted: trunk/Lib/sandbox/pyem/basic_example3.py =================================================================== --- trunk/Lib/sandbox/pyem/basic_example3.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/basic_example3.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -1,64 +0,0 @@ -import numpy as N -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -seed(2) - -k = 4 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GMM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.0) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -# List of learned mixtures lgm[i] is a mixture with i+1 components -lgm = [] -kmax = 6 -bics = N.zeros(kmax) -em = EM() -for i in range(kmax): - lgm.append(GM(d, i+1, mode)) - - gmm = GMM(lgm[i], 'kmean') - em.train(data, gmm, maxiter = 30, thresh = 1e-10) - bics[i] = gmm.bic(data) - -print "Original model has %d clusters, bics says %d" % (k, N.argmax(bics)+1) - -#+++++++++++++++ -# Draw the model -#+++++++++++++++ -import pylab as P -P.subplot(3, 2, 1) - -for k in range(kmax): - P.subplot(3, 2, k+1) - level = 0.9 - P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_') - - # h keeps the handles of the plot, so that you can modify - # its parameters like label or color - h = lgm[k].plot(level = level) - [i.set_color('r') for i in h] - h[0].set_label('EM confidence ellipsoides') - - h = gm.plot(level = level) - [i.set_color('g') for i in h] - h[0].set_label('Real confidence ellipsoides') - -P.legend(loc = 0) -# depending on your configuration, you may have to call P.show() -# to actually display the figure Property changes on: trunk/Lib/sandbox/pyem/doc ___________________________________________________________________ Name: svn:ignore + *.aux *.log *.out *.tex Added: trunk/Lib/sandbox/pyem/doc/Bic_example.png =================================================================== (Binary files differ) Property changes on: trunk/Lib/sandbox/pyem/doc/Bic_example.png ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/Lib/sandbox/pyem/doc/Makefile =================================================================== --- trunk/Lib/sandbox/pyem/doc/Makefile 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/Makefile 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,44 @@ +# Last Change: Mon May 28 10:00 AM 2007 J + +# This makefile is used to build the pdf from the rest file and inlined code +# from python examples + +py2tex = PYTHONPATH=/home/david/local/lib/python2.4/site-packages pygmentize -l python -f tex +rst2tex = PYTHONPATH=/home/david/local/lib/python2.4/site-packages rst2newlatex.py \ + --stylesheet-path base.tex --user-stylesheet user.tex + +pytexfiles = pyem.tex basic_example1.tex basic_example2.tex basic_example3.tex + +SOURCEPATH = $(PWD) + +EXTTOCLEAN=.chk .dvi .log .aux .bbl .blg .blig .ilg .toc .lof .lot .idx .ind .out .bak .ps .pdf .bm + +tutorial.pdf: pyem.pdf + mv $< $@ + +pyem.pdf: $(pytexfiles) + pdflatex $< + pdflatex $< + pdflatex $< + +pyem.tex: index.txt + $(rst2tex) $< > $@ + +basic_example1.tex: examples/basic_example1.py + $(py2tex) $< > $@ + +basic_example2.tex: examples/basic_example2.py + $(py2tex) $< > 
$@ + +basic_example3.tex: examples/basic_example3.py + $(py2tex) $< > $@ + +clean: + for i in $(pytexfiles); do \ + rm -f `echo $$i`; \ + done; + for i in $(SOURCEPATH); do \ + for j in $(EXTTOCLEAN); do \ + rm -f `echo $$i/*$$j`; \ + done; \ + done; Added: trunk/Lib/sandbox/pyem/doc/base.tex =================================================================== --- trunk/Lib/sandbox/pyem/doc/base.tex 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/base.tex 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,1182 @@ +% System stylesheet for the new LaTeX writer, newlatex2e. + +% Major parts of the rendering are done in this stylesheet and not in the +% Python module. + +% For development notes, see notes.txt. + +% User documentation (in the stylesheet for now; that may change though): + +% Naming conventions: +% All uppercase letters in macro names have a specific meaning. +% \D...: All macros introduced by the Docutils LaTeX writer start with "D". +% \DS: Setup function (called at the bottom of this stylesheet). +% \DN{}: Handler for Docutils document tree node `node`; called by +% the Python module. +% \DEV: External variable, set by the Python module. +% \DEC: External command. It is called by the Python module and must be +% defined in this stylesheet. +% \DNA{}{}{}{}{}: +% Attribute handler for `attribute` set on nodes of type `nodename`. +% See below for a discussion of attribute handlers. +% \DA{}{}{}{}{}: +% Attribute handler for all `attribute`. Called only when no specific +% \DNA handler is defined. +% \DNC{}: +% Handler for `class`, when set on nodes of type `nodename`. +% \DC{}: +% Handler for `class`. Called only when no specific \DNC +% handler is defined. +% \D: Generic variable or function. + +% Attribute handlers: +% TODO + +% --------------------------------------------------------------------------- + +% Having to intersperse code with \makeatletter-\makeatother pairs is very +% annoying, so we call \makeatletter at the top and \makeatother at the +% bottom. Just be aware that you cannot use "@" as a text character inside +% this stylesheet. +\makeatletter + +% Print-mode (as opposed to online mode e.g. with Adobe Reader). +% This causes for example blue hyperlinks. +\providecommand{\Dprinting}{false} + +% \DSearly is called right after \documentclass. +\providecommand{\DSearly}{} +% \DSlate is called at the end of the stylesheet (right before the document +% tree). +\providecommand{\DSlate}{} + +% Use the KOMA script article class. +\providecommand{\Ddocumentclass}{scrartcl} +\providecommand{\Ddocumentoptions}{a4paper} +\providecommand{\DSdocumentclass}{ + \documentclass[\Ddocumentoptions]{\Ddocumentclass} } + +% Todo: This should be movable to the bottom, but it isn't as long as +% we use \usepackage commands at the top level of this stylesheet +% (which we shouldn't). +\DSdocumentclass + +\providecommand{\DSpackages}{ + % Load miscellaneous packages. + % Note 1: Many of the packages loaded here are used throughout this stylesheet. + % If one of these packages does not work on your system or in your scenario, + % please let us know, so we can consider making the package optional. + % Note 2: It would appear cleaner to load packages where they are used. + % However, since using a wrong package loading order can lead to *very* + % subtle bugs, we centralize the loading of most packages here. + \DSfontencoding % load font encoding packages + \DSlanguage % load babel + % Using \ifthenelse conditionals. + \usepackage{ifthen} % before hyperref (really!) 
+ % There is not support for *not* using hyperref because it's used in many + % places. If this is a problem (e.g. because hyperref doesn't work on your + % system), please let us know. + \usepackage[colorlinks=false,pdfborder={0 0 0}]{hyperref} + % Get color, e.g. for links and system messages. + \usepackage{color} + % Get \textnhtt macro (non-hyphenating type writer). + \usepackage{hyphenat} + % For sidebars. + \usepackage{picins} + % We use longtable to create tables. + \usepackage{longtable} + % Images. + \usepackage{graphicx} + % These packages might be useful (some just add magic pixie dust), so + % evaluate them: + %\usepackage{fixmath} + %\usepackage{amsmath} + % Add some missing symbols like \textonehalf. + \usepackage{textcomp} +} + +\providecommand{\DSfontencoding}{ + % Set up font encoding. Called by \DSpackages. + % AE is a T1 emulation. It provides mostly the same characters and + % features as T1-encoded fonts but doesn't use bitmap fonts (which are + % unsuitable for online reading and subtle for printers). + \usepackage{ae} + % Provide the characters not contained in AE from EC bitmap fonts. + \usepackage{aecompl} + % Guillemets ("<<", ">>") in AE. + \usepackage{aeguill} +} + +\providecommand{\DSsymbols}{% + % Fix up symbols. + % The Euro symbol in Computer Modern looks, um, funny. Let's get a + % proper Euro symbol. + \usepackage{eurosym}% + \renewcommand{\texteuro}{\euro}% +} + +% Taken from +% +% and modified. Used with permission. +\providecommand{\Dprovidelength}[2]{% + \begingroup% + \escapechar\m at ne% + \xdef\@gtempa{{\string#1}}% + \endgroup% + \expandafter\@ifundefined\@gtempa% + {\newlength{#1}\setlength{#1}{#2}}% + {}% +} + +\providecommand{\Dprovidecounter}[2]{% + % Like \newcounter except that it doesn't crash if the counter + % already exists. + \@ifundefined{c@#1}{\newcounter{#1}\setcounter{#1}{#2}}{} +} + +\Dprovidelength{\Dboxparindent}{\parindent} + +\providecommand{\Dmakebox}[1]{% + % Make a centered, frameless box. Useful e.g. for block quotes. + % Do not use minipages here, but create pseudo-lists to allow + % page-breaking. (Don't use KOMA-script's addmargin environment + % because it messes up bullet lists.) + \Dmakelistenvironment{}{}{% + \setlength{\parskip}{0pt}% + \setlength{\parindent}{\Dboxparindent}% + \item{#1}% + }% +} + +\providecommand{\Dmakefbox}[1]{% + % Make a centered, framed box. Useful e.g. for admonitions. + \vspace{0.4\baselineskip}% + \begin{center}% + \fbox{% + \begin{minipage}[t]{0.9\linewidth}% + \setlength{\parindent}{\Dboxparindent}% + #1% + \end{minipage}% + }% + \end{center}% + \vspace{0.4\baselineskip}% +} + +% We do not currently recognize the difference between an end-sentence and a +% mid-sentence period (". " vs. ". " in plain text). So \frenchspacing is +% appropriate. +\providecommand{\DSfrenchspacing}{\frenchspacing} + + +\Dprovidelength{\Dblocklevelvspace}{% + % Space between block-level elements other than paragraphs. + 0.7\baselineskip plus 0.3\baselineskip minus 0.2\baselineskip% +} +\providecommand{\DECauxiliaryspace}{% + \ifthenelse{\equal{\Dneedvspace}{true}}{\vspace{\Dblocklevelvspace}}{}% + \par\noindent% +} +\providecommand{\DECparagraphspace}{\par} +\providecommand{\Dneedvspace}{true} + +\providecommand{\DSlanguage}{% + % Set up babel. + \usepackage[\DEVlanguagebabel]{babel} +} + +\providecommand{\Difdefined}[3]{\@ifundefined{#1}{#3}{#2}} + +% Handler for 'classes' attribute (called for each class attribute). +\providecommand{\DAclasses}[5]{% + % Dispatch to \DNC. 
+ \Difdefined{DN#4C#3}{% + % Pass only contents, nothing else! + \csname DN#4C#3\endcsname{#5}% + }{% + % Otherwise, dispatch to \DC. + \Difdefined{DC#3}{% + \csname DC#3\endcsname{#5}% + }{% + #5% + }% + }% +} + +\providecommand{\DECattr}[5]{% + % Global attribute dispatcher, called inside the document tree. + % Parameters: + % 1. Attribute number. + % 2. Attribute name. + % 3. Attribute value. + % 4. Node name. + % 5. Node contents. + \Difdefined{DN#4A#2}{% + % Dispatch to \DNA. + \csname DN#4A#2\endcsname{#1}{#2}{#3}{#4}{#5}% + }{\Difdefined{DA#2}{% + % Otherwise dispatch to \DA. + \csname DA#2\endcsname{#1}{#2}{#3}{#4}{#5}% + }{% + % Otherwise simply run the contents without calling a handler. + #5% + }}% +} + +% ---------- Link handling ---------- +% Targets and references. + +\providecommand{\Draisedlink}[1]{% + % Anchors are placed on the base line by default. This is a bad thing for + % inline context, so we raise the anchor (normally by \baselineskip). + \Hy at raisedlink{#1}% +} + +% References. +% We're assuming here that the "refid" and "refuri" attributes occur +% only in inline context (in TextElements). +\providecommand{\DArefid}[5]{% + \ifthenelse{\equal{#4}{reference}}{% + \Dexplicitreference{\##3}{#5}% + }{% + % If this is not a target node (targets with refids are + % uninteresting and should be silently dropped). + \ifthenelse{\not\equal{#4}{target}}{% + % If this is a footnote reference, call special macro. + \ifthenelse{\equal{#4}{footnotereference}}{% + \Dimplicitfootnotereference{\##3}{#5}% + }{% + \ifthenelse{\equal{#4}{citationreference}}{% + \Dimplicitcitationreference{\##3}{#5}% + }{% + \Dimplicitreference{\##3}{#5}% + }% + }% + }{}% + }% +} +\providecommand{\DArefuri}[5]{% + \ifthenelse{\equal{#4}{target}}{% + % The node name is 'target', so this is a hyperlink target, like this: + % .. _mytarget: URI + % Hyperlink targets are ignored because they are invisible. + }{% + % If a non-target node has a refuri attribute, it must be an explicit URI + % reference (i.e. node name is 'reference'). + \Durireference{#3}{#5}% + }% +} +% Targets. +\providecommand{\DAids}[5]{% + \label{#3}% + \ifthenelse{\equal{#4}{footnotereference}}{% + {% + \renewcommand{\HyperRaiseLinkDefault}{% + % Dirty hack to make backrefs to footnote references work. + % For some reason, \baselineskip is 0pt in fn references. + 0.5\Doriginalbaselineskip% + }% + \Draisedlink{\hypertarget{#3}{}}#5% + }% + }{% + \Draisedlink{\hypertarget{#3}{}}#5% + }% +} +\providecommand{\Dimplicitreference}[2]{% + % Create implicit reference to ID. Implicit references occur + % e.g. in TOC-backlinks of section titles. Parameters: + % 1. Target. + % 2. Link text. + \href{#1}{#2}% +} +\providecommand{\Dimplicitfootnotereference}[2]{% + % Ditto, but for the special case of footnotes. + % We want them to be rendered like explicit references. + \Dexplicitreference{#1}{#2}% +} +\providecommand{\Dimplicitcitationreference}[2]{% + % Ditto for citation references. + \Dimplicitfootnotereference{#1}{#2}% +} +\providecommand{\Dcolorexplicitreference}{% + \ifthenelse{\equal{\Dprinting}{true}}{\color{black}}{\color{blue}}% +} +\providecommand{\Dexplicitreference}[2]{% + % Create explicit reference to ID, e.g. created with "foo_". + % Parameters: + % 1. Target. + % 2. Link text. + \href{#1}{{\Dcolorexplicitreference#2}}% +} +\providecommand{\Dcolorurireference}{\Dcolorexplicitreference} +\providecommand{\Durireference}[2]{% + % Create reference to URI. Parameters: + % 1. Target. + % 2. Link text. 
+ \href{#1}{{\Dcolorurireference#2}}% +} + +\Dprovidecounter{Dpdfbookmarkid}{0}% +\providecommand{\Dpdfbookmark}[1]{% + % Temporarily decrement Desctionlevel counter. + \addtocounter{Dsectionlevel}{-1}% + %\typeout{\arabic{Dsectionlevel}}% + %\typeout{#1}% + %\typeout{docutils\roman{Dpdfbookmarkid}}% + %\typeout{}% + \pdfbookmark[\arabic{Dsectionlevel}]{#1}{docutils\arabic{Dpdfbookmarkid}}% + \addtocounter{Dsectionlevel}{1}% + \addtocounter{Dpdfbookmarkid}{1}% +} +% ---------- End of Link Handling ---------- + +\providecommand{\DNparagraph}[1]{% + \ifthenelse{\equal{\DEVparagraphindented}{true}}{\indent}{\noindent}% + #1% +} +\providecommand{\Dformatboxtitle}[1]{{\Large\textbf{#1}}} +\providecommand{\Dformatboxsubtitle}[1]{{\large\textbf{#1}}} +\providecommand{\Dtopictitle}[1]{% + \Difinsidetoc{\vspace{1em}\par}{}% + \noindent\Dformatboxtitle{#1}% + \ifthenelse{\equal{\DEVhassubtitle}{false}}{\vspace{1em}}{\vspace{0.5em}}% + \par% +} +\providecommand{\Dadmonitiontitle}[1]{% + \Dtopictitle{#1}% +} +\providecommand{\Dtopicsubtitle}[1]{% + \noindent\Dformatboxsubtitle{#1}% + \vspace{1em}% + \par% +} +\providecommand{\Dsidebartitle}[1]{\Dtopictitle{#1}} +\providecommand{\Dsidebarsubtitle}[1]{\Dtopicsubtitle{#1}} +\providecommand{\Ddocumenttitle}[1]{% + \begin{center}{\Huge#1}\end{center}% + \ifthenelse{\equal{\DEVhassubtitle}{true}}{\vspace{0.1cm}}{\vspace{1cm}}% +} +\providecommand{\Ddocumentsubtitle}[1]{% + \begin{center}{\huge#1}\end{center}% + \vspace{1cm}% +} +% Can be overwritten by user stylesheet. +\providecommand{\Dformatsectiontitle}[1]{#1} +\providecommand{\Dformatsectionsubtitle}[1]{\Dformatsectiontitle{#1}} +\providecommand{\Dbookmarksectiontitle}[1]{% + % Return text suitable for use in \section*, \subsection*, etc., + % containing a PDF bookmark. Parameter: The title (as node tree). + \Draisedlink{\Dpdfbookmark{\DEVtitleastext}}% + #1% +} +\providecommand{\Dsectiontitlehook}[1]{#1} +\providecommand{\Dsectiontitle}[1]{% + \Dsectiontitlehook{% + \Ddispatchsectiontitle{\Dbookmarksectiontitle{\Dformatsectiontitle{#1}}}% + }% +} +\providecommand{\Ddispatchsectiontitle}[1]{% + \@ifundefined{Dsectiontitle\roman{Dsectionlevel}}{% + \Ddeepsectiontitle{#1}% + }{% + \csname Dsectiontitle\roman{Dsectionlevel}\endcsname{#1}% + }% +} +\providecommand{\Ddispatchsectionsubtitle}[1]{% + \Ddispatchsectiontitle{#1}% +} +\providecommand{\Dsectiontitlei}[1]{\section*{#1}} +\providecommand{\Dsectiontitleii}[1]{\subsection*{#1}} +\providecommand{\Ddeepsectiontitle}[1]{% + % Anything below \subsubsection (like \paragraph or \subparagraph) + % is useless because it uses the same font. The only way to + % (visually) distinguish such deeply nested sections is to use + % section numbering. + \subsubsection*{#1}% +} +\providecommand{\Dsectionsubtitlehook}[1]{#1} +\Dprovidelength{\Dsectionsubtitleraisedistance}{0.7em} +\providecommand{\Dsectionsubtitlescaling}{0.85} +\providecommand{\Dsectionsubtitle}[1]{% + \Dsectionsubtitlehook{% + % Move the subtitle nearer to the title. + \vspace{-\Dsectionsubtitleraisedistance}% + % Don't create a PDF bookmark. + \Ddispatchsectionsubtitle{% + \Dformatsectionsubtitle{\scalebox{\Dsectionsubtitlescaling}{#1}}% + }% + }% +} +\providecommand{\DNtitle}[1]{% + % Dispatch to \Dtitle. + \csname D\DEVparent title\endcsname{#1}% +} +\providecommand{\DNsubtitle}[1]{% + % Dispatch to \Dsubtitle. 
+ \csname D\DEVparent subtitle\endcsname{#1}% +} + +\providecommand{\DNliteralblock}[1]{% + \Dmakelistenvironment{}{% + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + \setlength{\leftmargin}{0pt}% + }{}% + \setlength{\rightmargin}{0pt}% + }{% + \raggedright\item\noindent\nohyphens{\textnhtt{#1\Dfinalstrut}}% + }% +} +\providecommand{\DNdoctestblock}[1]{\DNliteralblock{#1}} +\providecommand{\DNliteral}[1]{\textnhtt{#1}} +\providecommand{\DNemphasis}[1]{\emph{#1}} +\providecommand{\DNstrong}[1]{\textbf{#1}} +\providecommand{\DECvisitdocument}{\begin{document}\noindent} +\providecommand{\DECdepartdocument}{\end{document}} +\providecommand{\DNtopic}[1]{% + \ifthenelse{\equal{\DEVcurrentNtopicAcontents}{1}}{% + \addtocounter{Dtoclevel}{1}% + \par\noindent% + #1% + \addtocounter{Dtoclevel}{-1}% + }{% + \par\noindent% + \Dmakebox{#1}% + }% +} +\providecommand{\DNadmonition}[1]{% + \DNtopic{#1}% +} +\providecommand{\Dformatrubric}[1]{\textbf{#1}} +\Dprovidelength{\Dprerubricspace}{0.3em} +\providecommand{\DNrubric}[1]{% + \vspace{\Dprerubricspace}\par\noindent\Dformatrubric{#1}\par% +} + +\providecommand{\Dbullet}{} +\providecommand{\DECsetbullet}[1]{\renewcommand{\Dbullet}{#1}} +\providecommand{\DNbulletlist}[1]{% + \Difinsidetoc{% + \Dtocbulletlist{#1}% + }{% + \Dmakelistenvironment{\Dbullet}{}{#1}% + }% +} +% Todo: So what on earth is @pnumwidth? +\renewcommand{\@pnumwidth}{2.2em} +\providecommand{\DNlistitem}[1]{% + \Difinsidetoc{% + \ifthenelse{\equal{\theDtoclevel}{1}\and\equal{\Dlocaltoc}{false}}{% + {% + \par\addvspace{1em}\noindent% + \sectfont% + #1\hfill\pageref{\DEVcurrentNlistitemAtocrefid}% + }% + }{% + \@dottedtocline{0}{\Dtocindent}{0em}{#1}{% + \pageref{\DEVcurrentNlistitemAtocrefid}% + }% + }% + }{% + \item{#1}% + }% +} +\providecommand{\DNenumeratedlist}[1]{#1} +\Dprovidecounter{Dsectionlevel}{0} +\providecommand{\Dvisitsectionhook}{} +\providecommand{\Ddepartsectionhook}{} +\providecommand{\DECvisitsection}{% + \addtocounter{Dsectionlevel}{1}% + \Dvisitsectionhook% +} +\providecommand{\DECdepartsection}{% + \Ddepartsectionhook% + \addtocounter{Dsectionlevel}{-1}% +} + +% Using \_ will cause hyphenation after _ even in \textnhtt-typewriter +% because the hyphenat package redefines \_. So we use +% \textunderscore here. +\providecommand{\DECtextunderscore}{\textunderscore} + +\providecommand{\Dtextinlineliteralfirstspace}{{ }} +\providecommand{\Dtextinlineliteralsecondspace}{{~}} + +\Dprovidelength{\Dlistspacing}{0.8\baselineskip} + +\providecommand{\Dsetlistrightmargin}{% + \ifthenelse{\lengthtest{\linewidth>12em}}{% + % Equal margins. + \setlength{\rightmargin}{\leftmargin}% + }{% + % If the line is narrower than 10em, we don't remove any further + % space from the right. + \setlength{\rightmargin}{0pt}% + }% +} +\providecommand{\Dresetlistdepth}{false} +\Dprovidelength{\Doriginallabelsep}{\labelsep} +\providecommand{\Dmakelistenvironment}[3]{% + % Make list environment with support for unlimited nesting and with + % reasonable default lengths. Parameters: + % 1. Label (same as in list environment). + % 2. Spacing (same as in list environment). + % 3. List contents (contents of list environment). + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + % Unfortunately, vertical spacing doesn't work correctly when + % using lists inside tabular environments, so we use a minipage. 
+ \begin{minipage}[t]{\linewidth}% + }{}% + {% + \renewcommand{\Dneedvspace}{false}% + % \parsep0.5\baselineskip + \renewcommand{\Dresetlistdepth}{false}% + \ifnum \@listdepth>5% + \protect\renewcommand{\Dresetlistdepth}{true}% + \@listdepth=5% + \fi% + \begin{list}{% + #1% + }{% + \setlength{\itemsep}{0pt}% + \setlength{\partopsep}{0pt}% + \setlength{\topsep}{0pt}% + % List should take 90% of total width. + \setlength{\leftmargin}{0.05\linewidth}% + \ifthenelse{\lengthtest{\leftmargin<1.8em}}{% + \setlength{\leftmargin}{1.8em}% + }{}% + \setlength{\labelsep}{\Doriginallabelsep}% + \Dsetlistrightmargin% + #2% + }{% + #3% + }% + \end{list}% + \ifthenelse{\equal{\Dresetlistdepth}{true}}{\@listdepth=5}{}% + }% + \ifthenelse{\equal{\Dinsidetabular}{true}}{\end{minipage}}{}% +} +\providecommand{\Dfinalstrut}{\@finalstrut\@arstrutbox} +\providecommand{\DAlastitem}[5]{#5\Dfinalstrut} + +\Dprovidelength{\Ditemsep}{0pt} +\providecommand{\DECmakeenumeratedlist}[6]{% + % Make enumerated list. + % Parameters: + % - prefix + % - type (\arabic, \roman, ...) + % - suffix + % - suggested counter name + % - start number - 1 + % - list contents + \newcounter{#4}% + \Dmakelistenvironment{#1#2{#4}#3}{% + % Use as much space as needed for the label. + \setlength{\labelwidth}{10em}% + % Reserve enough space so that the label doesn't go beyond the + % left margin of preceding paragraphs. Like that: + % + % A paragraph. + % + % 1. First item. + \setlength{\leftmargin}{2.5em}% + \Dsetlistrightmargin% + \setlength{\itemsep}{\Ditemsep}% + % Use counter recommended by Python module. + \usecounter{#4}% + % Set start value. + \addtocounter{#4}{#5}% + }{% + % The list contents. + #6% + }% +} + + +% Single quote in literal mode. \textquotesingle from package +% textcomp has wrong width when using package ae, so we use a normal +% single curly quote here. +\providecommand{\DECtextliteralsinglequote}{'} + + +% "Tabular lists" are field lists and options lists (not definition +% lists because there the term always appears on its own line). We'll +% use the terminology of field lists now ("field", "field name", +% "field body"), but the same is also analogously applicable to option +% lists. +% +% We want these lists to be breakable across pages. We cannot +% automatically get the narrowest possible size for the left column +% (i.e. the field names or option groups) because tabularx does not +% support multi-page tables, ltxtable needs to have the table in an +% external file and we don't want to clutter the user's directories +% with auxiliary files created by the filecontents environment, and +% ltablex is not included in teTeX. +% +% Thus we set a fixed length for the left column and use list +% environments. This also has the nice side effect that breaking is +% now possible anywhere, not just between fields. +% +% Note that we are creating a distinct list environment for each +% field. There is no macro for a whole tabular list! 
+\Dprovidelength{\Dtabularlistfieldnamewidth}{6em} +\Dprovidelength{\Dtabularlistfieldnamesep}{0.5em} +\providecommand{\Dinsidetabular}{false} +\providecommand{\Dsavefieldname}{} +\providecommand{\Dsavefieldbody}{} +\Dprovidelength{\Dusedfieldnamewidth}{0pt} +\Dprovidelength{\Drealfieldnamewidth}{0pt} +\providecommand{\Dtabularlistfieldname}[1]{\renewcommand{\Dsavefieldname}{#1}} +\providecommand{\Dtabularlistfieldbody}[1]{\renewcommand{\Dsavefieldbody}{#1}} +\Dprovidelength{\Dparskiptemp}{0pt} +\providecommand{\Dtabularlistfield}[1]{% + {% + % This only saves field name and field body in \Dsavefieldname and + % \Dsavefieldbody, resp. It does not insert any text into the + % document. + #1% + % Recalculate the real field name width everytime we encounter a + % tabular list field because it may have been changed using a + % "raw" node. + \setlength{\Drealfieldnamewidth}{\Dtabularlistfieldnamewidth}% + \addtolength{\Drealfieldnamewidth}{\Dtabularlistfieldnamesep}% + \Dmakelistenvironment{% + \makebox[\Drealfieldnamewidth][l]{\Dsavefieldname}% + }{% + \setlength{\labelwidth}{\Drealfieldnamewidth}% + \setlength{\leftmargin}{\Drealfieldnamewidth}% + \setlength{\rightmargin}{0pt}% + \setlength{\labelsep}{0pt}% + }{% + \item% + \settowidth{\Dusedfieldnamewidth}{\Dsavefieldname}% + \setlength{\Dparskiptemp}{\parskip}% + \ifthenelse{% + \lengthtest{\Dusedfieldnamewidth>\Dtabularlistfieldnamewidth}% + }{% + \mbox{}\par% + \setlength{\parskip}{0pt}% + }{}% + \Dsavefieldbody% + \setlength{\parskip}{\Dparskiptemp}% + %XXX Why did we need this? + %\@finalstrut\@arstrutbox% + }% + \par% + }% +} + +\providecommand{\Dformatfieldname}[1]{\textbf{#1:}} +\providecommand{\DNfieldlist}[1]{#1} +\providecommand{\DNfield}[1]{\Dtabularlistfield{#1}} +\providecommand{\DNfieldname}[1]{% + \Dtabularlistfieldname{% + \Dformatfieldname{#1}% + }% +} +\providecommand{\DNfieldbody}[1]{\Dtabularlistfieldbody{#1}} + +\providecommand{\Dformatoptiongroup}[1]{% + % Format option group, e.g. "-f file, --input file". + \texttt{#1}% +} +\providecommand{\Dformatoption}[1]{% + % Format option, e.g. "-f file". + % Put into mbox to avoid line-breaking at spaces. + \mbox{#1}% +} +\providecommand{\Dformatoptionstring}[1]{% + % Format option string, e.g. "-f". + #1% +} +\providecommand{\Dformatoptionargument}[1]{% + % Format option argument, e.g. "file". + \textsl{#1}% +} +\providecommand{\Dformatoptiondescription}[1]{% + % Format option description, e.g. + % "\DNparagraph{Read input data from file.}" + #1% +} +\providecommand{\DNoptionlist}[1]{#1} +\providecommand{\Doptiongroupjoiner}{,{ }} +\providecommand{\Disfirstoption}{% + % Auxiliary macro indicating if a given option is the first child + % of its option group (if it's not, it has to preceded by + % \Doptiongroupjoiner). + false% +} +\providecommand{\DNoptionlistitem}[1]{% + \Dtabularlistfield{#1}% +} +\providecommand{\DNoptiongroup}[1]{% + \renewcommand{\Disfirstoption}{true}% + \Dtabularlistfieldname{\Dformatoptiongroup{#1}}% +} +\providecommand{\DNoption}[1]{% + % If this is not the first option in this option group, add a + % joiner. 
+ \ifthenelse{\equal{\Disfirstoption}{true}}{% + \renewcommand{\Disfirstoption}{false}% + }{% + \Doptiongroupjoiner% + }% + \Dformatoption{#1}% +} +\providecommand{\DNoptionstring}[1]{\Dformatoptionstring{#1}} +\providecommand{\DNoptionargument}[1]{{ }\Dformatoptionargument{#1}} +\providecommand{\DNdescription}[1]{% + \Dtabularlistfieldbody{\Dformatoptiondescription{#1}}% +} + +\providecommand{\DNdefinitionlist}[1]{% + \begin{description}% + \parskip0pt% + #1% + \end{description}% +} +\providecommand{\DNdefinitionlistitem}[1]{% + % LaTeX expects the label in square brackets; we provide an empty + % label. + \item[]#1% +} +\providecommand{\Dformatterm}[1]{#1} +\providecommand{\DNterm}[1]{\hspace{-5pt}\Dformatterm{#1}} +% I'm still not sure what's the best rendering for classifiers. The +% colon syntax is used by reStructuredText, so it's at least WYSIWYG. +% Use slanted text because italic would cause too much emphasis. +\providecommand{\Dformatclassifier}[1]{\textsl{#1}} +\providecommand{\DNclassifier}[1]{~:~\Dformatclassifier{#1}} +\providecommand{\Dformatdefinition}[1]{#1} +\providecommand{\DNdefinition}[1]{\par\Dformatdefinition{#1}} + +\providecommand{\Dlineblockindentation}{2.5em} +\providecommand{\DNlineblock}[1]{% + \Dmakelistenvironment{}{% + \ifthenelse{\equal{\DEVparent}{lineblock}}{% + % Parent is a line block, so indent. + \setlength{\leftmargin}{\Dlineblockindentation}% + }{% + % At top level; don't indent. + \setlength{\leftmargin}{0pt}% + }% + \setlength{\rightmargin}{0pt}% + \setlength{\parsep}{0pt}% + }{% + #1% + }% +} +\providecommand{\DNline}[1]{\item#1} + +\providecommand{\DNtransition}{% + \raisebox{0.25em}{\parbox{\linewidth}{\hspace*{\fill}\hrulefill\hrulefill\hspace*{\fill}}}% +} + +\providecommand{\Dformatblockquote}[1]{% + % Format contents of block quote. + % This occurs in block-level context, so we cannot use \textsl. + {\slshape#1}% +} +\providecommand{\Dformatattribution}[1]{---\textup{#1}} +\providecommand{\DNblockquote}[1]{% + \Dmakebox{% + \Dformatblockquote{#1} + }% +} +\providecommand{\DNattribution}[1]{% + \par% + \begin{flushright}\Dformatattribution{#1}\end{flushright}% +} + + +% Sidebars: +% Vertical and horizontal margins. +\Dprovidelength{\Dsidebarvmargin}{0.5em} +\Dprovidelength{\Dsidebarhmargin}{1em} +% Padding (space between contents and frame). +\Dprovidelength{\Dsidebarpadding}{1em} +% Frame width. +\Dprovidelength{\Dsidebarframewidth}{2\fboxrule} +% Position ("l" or "r"). +\providecommand{\Dsidebarposition}{r} +% Width. +\Dprovidelength{\Dsidebarwidth}{0.45\linewidth} +\providecommand{\DNsidebar}[1]{ + \parpic[\Dsidebarposition]{% + \begin{minipage}[t]{\Dsidebarwidth}% + % Doing this with nested minipages is ugly, but I haven't found + % another way to place vertical space before and after the fbox. + \vspace{\Dsidebarvmargin}% + {% + \setlength{\fboxrule}{\Dsidebarframewidth}% + \setlength{\fboxsep}{\Dsidebarpadding}% + \fbox{% + \begin{minipage}[t]{\linewidth}% + \setlength{\parindent}{\Dboxparindent}% + #1% + \end{minipage}% + }% + }% + \vspace{\Dsidebarvmargin}% + \end{minipage}% + }% +} + + +% Citations and footnotes. +\providecommand{\Dformatfootnote}[1]{% + % Format footnote. + {% + \footnotesize#1% + % \par is necessary for LaTeX to adjust baselineskip to the + % changed font size. + \par% + }% +} +\providecommand{\Dformatcitation}[1]{\Dformatfootnote{#1}} +\Dprovidelength{\Doriginalbaselineskip}{0pt} +\providecommand{\DNfootnotereference}[1]{% + {% + % \baselineskip is 0pt in \textsuperscript, so we save it here. 
+ \setlength{\Doriginalbaselineskip}{\baselineskip}% + \textsuperscript{#1}% + }% +} +\providecommand{\DNcitationreference}[1]{{[}#1{]}} +\Dprovidelength{\Dfootnotesep}{3.5pt} +\providecommand{\Dsetfootnotespacing}{% + % Spacing commands executed at the beginning of footnotes. + \setlength{\parindent}{0pt}% + \hspace{1em}% +} +\providecommand{\DNfootnote}[1]{% + % See ltfloat.dtx for details. + {% + \insert\footins{% + % BUG: This is too small if the user adds + % \onehalfspacing or \doublespace. + \vspace{\Dfootnotesep}% + \Dsetfootnotespacing% + \Dformatfootnote{#1}% + }% + }% +} +\providecommand{\DNcitation}[1]{\DNfootnote{#1}} +\providecommand{\Dformatfootnotelabel}[1]{% + % Keep \footnotesize in footnote labels (\textsuperscript would + % reduce the font size even more). + \textsuperscript{\footnotesize#1{ }}% +} +\providecommand{\Dformatcitationlabel}[1]{{[}#1{]}{ }} +\providecommand{\Dformatmultiplebackrefs}[1]{% + % If in printing mode, do not write out multiple backrefs. + \ifthenelse{\equal{\Dprinting}{true}}{}{\textsl{#1}}% +} +\providecommand{\Dthislabel}{} +\providecommand{\DNlabel}[1]{% + % Footnote or citatation label. + \renewcommand{\Dthislabel}{#1}% + \ifthenelse{\not\equal{\DEVsinglebackref}{}}{% + \let\Doriginallabel=\Dthislabel% + \def\Dthislabel{% + \Dsinglefootnotebacklink{\DEVsinglebackref}{\Doriginallabel}% + }% + }{}% + \ifthenelse{\equal{\DEVparent}{footnote}}{% + % Footnote label. + \Dformatfootnotelabel{\Dthislabel}% + }{% + \ifthenelse{\equal{\DEVparent}{citation}}{% + % Citation label. + \Dformatcitationlabel{\Dthislabel}% + }{}% + }% + % If there are multiple backrefs, add them now. + \Dformatmultiplebackrefs{\DEVmultiplebackrefs}% +} +\providecommand{\Dsinglefootnotebacklink}[2]{% + % Create normal backlink of a footnote label. Parameters: + % 1. ID. + % 2. Link text. + % Treat like a footnote reference. + \Dimplicitfootnotereference{\##1}{#2}% +} +\providecommand{\DECmultifootnotebacklink}[2]{% + % Create generated backlink, as in (1, 2). Parameters: + % 1. ID. + % 2. Link text. + % Treat like a footnote reference. + \Dimplicitfootnotereference{\##1}{#2}% +} +\providecommand{\Dsinglecitationbacklink}[2]{\Dsinglefootnotebacklink{#1}{#2}} +\providecommand{\DECmulticitationbacklink}[2]{\DECmultifootnotebacklink{#1}{#2}} + + +\providecommand{\DECmaketable}[2]{% + % Make table. Parameters: + % 1. Table spec (like "|p|p|"). + % 2. Table contents. + {% + \ifthenelse{\equal{\Dinsidetabular}{true}}{% + % Inside longtable; we cannot have nested longtables. + \begin{tabular}{#1}% + \hline% + #2% + \end{tabular}% + }{% + \renewcommand{\Dinsidetabular}{true}% + \begin{longtable}{#1}% + \hline% + #2% + \end{longtable}% + }% + }% +} +\providecommand{\DNthead}[1]{% + #1% + \endhead% +} +\providecommand{\DNrow}[1]{% + #1\tabularnewline% + \hline% +} +\providecommand{\Dinsidemulticolumn}{false} +\providecommand{\Dcompensatingmulticol}[3]{% + \multicolumn{#1}{#2}{% + {% + \renewcommand{\Dinsidemulticolumn}{true}% + % Compensate for weird missing vertical space at top of paragraph. + \raisebox{-2.5pt}{#3}% + }% + }% +} +\providecommand{\DECcolspan}[2]{% + % Take care of the morecols attribute (but incremented by 1). + &% + \Dcompensatingmulticol{#1}{l|}{#2}% +} +\providecommand{\DECcolspanleft}[2]{% + % Like \Dmorecols, but called for the leftmost entries in a table + % row. 
+ \Dcompensatingmulticol{#1}{|l|}{#2}% +} +\providecommand{\DECsubsequententry}[1]{% + % +} +\providecommand{\DNentry}[1]{% + % The following sequence adds minimal vertical space above the top + % lines of the first cell paragraph, so that vertical space is + % balanced at the top and bottom of table cells. + \ifthenelse{\equal{\Dinsidemulticolumn}{false}}{% + \vspace{-1em}\vspace{-\parskip}\par% + }{}% + #1% + % No need to add an ampersand ("&"); that's done by \DECsubsequententry. +} +\providecommand{\DAtableheaderentry}[5]{\Dformattableheaderentry{#5}} +\providecommand{\Dformattableheaderentry}[1]{{\bfseries#1}} + + +\providecommand{\DNsystemmessage}[1]{% + {% + \ifthenelse{\equal{\Dprinting}{false}}{\color{red}}{}% + \bfseries% + #1% + }% +} + + +\providecommand{\Dinsidehalign}{false} +\newsavebox{\Dalignedimagebox} +\Dprovidelength{\Dalignedimagewidth}{0pt} +\providecommand{\Dhalign}[2]{% + % Horizontally align the contents to the left or right so that the + % text flows around it. + % Parameters: + % 1. l or r + % 2. Contents. + \renewcommand{\Dinsidehalign}{true}% + % For some obscure reason \parpic consumes some vertical space. + \vspace{-3pt}% + % Now we do something *really* ugly, but this enables us to wrap the + % image in a minipage while still allowing tight frames when + % class=border (see \DNimageCborder). + \sbox{\Dalignedimagebox}{#2}% + \settowidth{\Dalignedimagewidth}{\usebox{\Dalignedimagebox}}% + \parpic[#1]{% + \begin{minipage}[b]{\Dalignedimagewidth}% + % Compensate for previously added space, but not entirely. + \vspace*{2.0pt}% + \vspace*{\Dfloatimagetopmargin}% + \usebox{\Dalignedimagebox}% + \vspace*{1.5pt}% + \vspace*{\Dfloatimagebottommargin}% + \end{minipage}% + }% + \renewcommand{\Dinsidehalign}{false}% +} + + +% Maximum width of an image. +\providecommand{\Dimagemaxwidth}{\linewidth} +\providecommand{\Dfloatimagemaxwidth}{0.5\linewidth} +% Auxiliary variable. +\Dprovidelength{\Dcurrentimagewidth}{0pt} +\providecommand{\DNimageAalign}[5]{% + \ifthenelse{\equal{#3}{left}}{% + \Dhalign{l}{#5}% + }{% + \ifthenelse{\equal{#3}{right}}{% + \Dhalign{r}{#5}% + }{% + \ifthenelse{\equal{#3}{center}}{% + % Text floating around centered figures is a bad idea. Thus + % we use a center environment. Note that no extra space is + % added by the writer, so the space added by the center + % environment is fine. + \begin{center}#5\end{center}% + }{% + #5% + }% + }% + }% +} +% Base path for images. +\providecommand{\Dimagebase}{} +% Auxiliary command. Current image path. +\providecommand{\Dimagepath}{} +\providecommand{\DNimageAuri}[5]{% + % Insert image. We treat the URI like a path here. + \renewcommand{\Dimagepath}{\Dimagebase#3}% + \Difdefined{DcurrentNimageAwidth}{% + \Dwidthimage{\DEVcurrentNimageAwidth}{\Dimagepath}% + }{% + \Dsimpleimage{\Dimagepath}% + }% +} +\Dprovidelength{\Dfloatimagevmargin}{0pt} +\providecommand{\Dfloatimagetopmargin}{\Dfloatimagevmargin} +\providecommand{\Dfloatimagebottommargin}{\Dfloatimagevmargin} +\providecommand{\Dwidthimage}[2]{% + % Image with specified width. + % Parameters: + % 1. Image width. + % 2. Image path. + % Need to make bottom-alignment dependent on align attribute (add + % functional test first). Need to observe height attribute. + %\begin{minipage}[b]{#1}% + \includegraphics[width=#1,height=\textheight,keepaspectratio]{#2}% + %\end{minipage}% +} +\providecommand{\Dcurrentimagemaxwidth}{} +\providecommand{\Dsimpleimage}[1]{% + % Insert image, without much parametrization. 
+ \settowidth{\Dcurrentimagewidth}{\includegraphics{#1}}% + \ifthenelse{\equal{\Dinsidehalign}{true}}{% + \renewcommand{\Dcurrentimagemaxwidth}{\Dfloatimagemaxwidth}% + }{% + \renewcommand{\Dcurrentimagemaxwidth}{\Dimagemaxwidth}% + }% + \ifthenelse{\lengthtest{\Dcurrentimagewidth>\Dcurrentimagemaxwidth}}{% + \Dwidthimage{\Dcurrentimagemaxwidth}{#1}% + }{% + \Dwidthimage{\Dcurrentimagewidth}{#1}% + }% +} +\providecommand{\Dwidthimage}[2]{% + % Image with specified width. + % Parameters: + % 1. Image width. + % 2. Image path. + \Dwidthimage{#1}{#2}% +} + +% Figures. +\providecommand{\DNfigureAalign}[5]{% + % Hack to make it work Right Now. + %\def\DEVcurrentNimageAwidth{\DEVcurrentNfigureAwidth}% + % + %\def\DEVcurrentNimageAwidth{\linewidth}% + \DNimageAalign{#1}{#2}{#3}{#4}{% + \begin{minipage}[b]{0.4\linewidth}#5\end{minipage}}% + %\let\DEVcurrentNimageAwidth=\relax% + % + %\let\DEVcurrentNimageAwidth=\relax% +} +\providecommand{\DNcaption}[1]{\par\noindent{\slshape#1}} +\providecommand{\DNlegend}[1]{\DECauxiliaryspace#1} + +\providecommand{\DCborder}[1]{\fbox{#1}} +% No padding between image and border. +\providecommand{\DNimageCborder}[1]{\frame{#1}} + + +% Need to replace with language-specific stuff. Maybe look at +% csquotes.sty and ask the author for permission to use parts of it. +\providecommand{\DECtextleftdblquote}{``} +\providecommand{\DECtextrightdblquote}{''} + +% Table of contents: +\Dprovidelength{\Dtocininitialsectnumwidth}{2.4em} +\Dprovidelength{\Dtocadditionalsectnumwidth}{0.7em} +% Level inside a table of contents. While this is at -1, we are not +% inside a TOC. +\Dprovidecounter{Dtoclevel}{-1}% +\providecommand{\Dlocaltoc}{false}% +\providecommand{\DNtopicClocal}[1]{% + \renewcommand{\Dlocaltoc}{true}% + \addtolength{\Dtocsectnumwidth}{2\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocindent}{-2\Dtocadditionalsectnumwidth}% + #1% + \addtolength{\Dtocindent}{2\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocsectnumwidth}{-2\Dtocadditionalsectnumwidth}% + \renewcommand{\Dlocaltoc}{false}% +} +\Dprovidelength{\Dtocindent}{0pt}% +\Dprovidelength{\Dtocsectnumwidth}{\Dtocininitialsectnumwidth} +% Compensate for one additional TOC indentation space so that the +% top-level is unindented. +\addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth} +\addtolength{\Dtocindent}{-\Dtocsectnumwidth} +\providecommand{\Difinsidetoc}[2]{% + \ifthenelse{\not\equal{\theDtoclevel}{-1}}{#1}{#2}% +} +\providecommand{\DNgeneratedCsectnum}[1]{% + \Difinsidetoc{% + % Section number inside TOC. + \makebox[\Dtocsectnumwidth][l]{#1}% + }{% + % Section number inside section title. + #1\quad% + }% +} +\providecommand{\Dtocbulletlist}[1]{% + \addtocounter{Dtoclevel}{1}% + \addtolength{\Dtocindent}{\Dtocsectnumwidth}% + \addtolength{\Dtocsectnumwidth}{\Dtocadditionalsectnumwidth}% + #1% + \addtolength{\Dtocsectnumwidth}{-\Dtocadditionalsectnumwidth}% + \addtolength{\Dtocindent}{-\Dtocsectnumwidth}% + \addtocounter{Dtoclevel}{-1}% +} + + +% For \DECpixelunit, the length value is pre-multiplied with 0.75, so by +% specifying "pt" we get the same notion of "pixel" as graphicx. +\providecommand{\DECpixelunit}{pt} +% Normally lengths are relative to the current linewidth. +\providecommand{\DECrelativeunit}{\linewidth} + + +% ACTION: These commands actually *do* something. +% Ultimately, everything should be done here, and no active content should be +% above (not even \usepackage). 
+ +\DSearly +\DSpackages +\DSfrenchspacing +\DSsymbols +\DSlate + +\makeatother + + \usepackage{fancyvrb} Added: trunk/Lib/sandbox/pyem/doc/example1.png =================================================================== (Binary files differ) Property changes on: trunk/Lib/sandbox/pyem/doc/example1.png ___________________________________________________________________ Name: svn:mime-type + application/octet-stream Added: trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,48 @@ +import numpy as N +import pylab as P +from scipy.sandbox.pyem import GM + +#------------------------------ +# Hyper parameters: +# - K: number of clusters +# - d: dimension +k = 3 +d = 2 + +#------------------------------------------------------- +# Values for weights, mean and (diagonal) variances +# - the weights are an array of rank 1 +# - mean is expected to be rank 2 with one row for one component +# - variances are also expteced to be rank 2. For diagonal, one row +# is one diagonal, for full, the first d rows are the first variance, +# etc... In this case, the variance matrix should be k*d rows and d +# colums +w = N.array([0.2, 0.45, 0.35]) +mu = N.array([[4.1, 3], [1, 5], [-2, -3]]) +va = N.array([[1, 1.5], [3, 4], [2, 3.5]]) + +#----------------------------------------- +# First method: directly from parameters: +# Both methods are equivalents. +gm = GM.fromvalues(w, mu, va) + +#------------------------------------- +# Second method to build a GM instance: +gm = GM(d, k, mode = 'diag') +# The set_params checks that w, mu, and va corresponds to k, d and m +gm.set_param(w, mu, va) + +# Once set_params is called, both methods are equivalent. 
The 2d +# method is useful when using a GM object for learning (where +# the learner class will set the params), whereas the first one +# is useful when there is a need to quickly sample a model +# from existing values, without a need to give the hyper parameters + +# Create a Gaussian Mixture from the parameters, and sample +# 1000 items from it (one row = one 2 dimension sample) +data = gm.sample(1000) + +# Plot the samples +P.plot(data[:, 0], data[:, 1], '.') +# Plot the ellipsoids of confidence with a level a 75 % +gm.plot(level = 0.75) Added: trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,45 @@ +from numpy.random import seed + +from scipy.sandbox.pyem import GM, GMM, EM +import copy + +# To reproduce results, fix the random seed +seed(1) + +#+++++++++++++++++++++++++++++ +# Meta parameters of the model +# - k: Number of components +# - d: dimension of each Gaussian +# - mode: Mode of covariance matrix: full or diag (string) +# - nframes: number of frames (frame = one data point = one +# row of d elements) +k = 2 +d = 2 +mode = 'diag' +nframes = 1e3 + +#+++++++++++++++++++++++++++++++++++++++++++ +# Create an artificial GM model, samples it +#+++++++++++++++++++++++++++++++++++++++++++ +w, mu, va = GM.gen_param(d, k, mode, spread = 1.5) +gm = GM.fromvalues(w, mu, va) + +# Sample nframes frames from the model +data = gm.sample(nframes) + +#++++++++++++++++++++++++ +# Learn the model with EM +#++++++++++++++++++++++++ + +# Create a Model from a Gaussian mixture with kmean initialization +lgm = GM(d, k, mode) +gmm = GMM(lgm, 'kmean') + +# The actual EM, with likelihood computation. The threshold +# is compared to the (linearly appromixated) derivative of the likelihood +em = EM() +like = em.train(data, gmm, maxiter = 30, thresh = 1e-8) + +# The computed parameters are in gmm.gm, which is the same than lgm +# (remember, python does not copy most objects by default). 
You can for example +# plot lgm against gm to compare Added: trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,64 @@ +import numpy as N +from numpy.random import seed + +from scipy.sandbox.pyem import GM, GMM, EM +import copy + +seed(2) + +k = 4 +d = 2 +mode = 'diag' +nframes = 1e3 + +#+++++++++++++++++++++++++++++++++++++++++++ +# Create an artificial GMM model, samples it +#+++++++++++++++++++++++++++++++++++++++++++ +w, mu, va = GM.gen_param(d, k, mode, spread = 1.0) +gm = GM.fromvalues(w, mu, va) + +# Sample nframes frames from the model +data = gm.sample(nframes) + +#++++++++++++++++++++++++ +# Learn the model with EM +#++++++++++++++++++++++++ + +# List of learned mixtures lgm[i] is a mixture with i+1 components +lgm = [] +kmax = 6 +bics = N.zeros(kmax) +em = EM() +for i in range(kmax): + lgm.append(GM(d, i+1, mode)) + + gmm = GMM(lgm[i], 'kmean') + em.train(data, gmm, maxiter = 30, thresh = 1e-10) + bics[i] = gmm.bic(data) + +print "Original model has %d clusters, bics says %d" % (k, N.argmax(bics)+1) + +#+++++++++++++++ +# Draw the model +#+++++++++++++++ +import pylab as P +P.subplot(3, 2, 1) + +for k in range(kmax): + P.subplot(3, 2, k+1) + level = 0.9 + P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_') + + # h keeps the handles of the plot, so that you can modify + # its parameters like label or color + h = lgm[k].plot(level = level) + [i.set_color('r') for i in h] + h[0].set_label('EM confidence ellipsoides') + + h = gm.plot(level = level) + [i.set_color('g') for i in h] + h[0].set_label('Real confidence ellipsoides') + +P.legend(loc = 0) +# depending on your configuration, you may have to call P.show() +# to actually display the figure Added: trunk/Lib/sandbox/pyem/doc/examples/examples.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/examples.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/examples/examples.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,14 @@ +def ex1(): + import basic_example1 + +def ex2(): + import basic_example2 + +def ex3(): + import basic_example3 + +if __name__ == '__main__': + ex1() + ex2() + ex3() + Added: trunk/Lib/sandbox/pyem/doc/index.txt =================================================================== --- trunk/Lib/sandbox/pyem/doc/index.txt 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/index.txt 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,234 @@ +.. + restindex + page-title: Pyem + crumb: Pyem + link-title: Pyem + encoding: utf-8 + output-encoding: None + tags: python,pyem,Expectation Maximization,EM,online EM,recursive EM + file: basic_example1.py + file: basic_example2.py + file: basic_example3.py + file: example1.png + file: Bic_example.png + /restindex + +.. Last Change: Mon May 28 10:00 AM 2007 J + +=================================================== + PyEM, a python package for Gaussian mixture models +=================================================== + +.. contents:: Tables of contents + +PyEM is a package which enables to create Gaussian Mixture Models +(diagonal and full covariance matrices supported), to sample them, +and to estimate them from data using Expectation Maximization algorithm. 
+It can also draw confidence ellipsoids for multivariate models, and +compute the Bayesian Information Criterion to assess the number of +clusters in the data. In the near future, I hope to add so-called +online EM (i.e. recursive EM) and a variational Bayes implementation. + +PyEM is implemented in python, and uses the excellent numpy and scipy +packages. Numpy is a python package which gives python fast +multi-dimensional array capabilities (a la matlab and the like); scipy +leverages numpy to build common scientific features for signal processing, +linear algebra, statistics, etc. + + +Installation +============ + +.. _scipy: http://www.scipy.org + +Pyem depends on several packages to work: + + - numpy + - matplotlib (if you wish to use the plotting facilities of pyem) + +Those packages are likely to be already installed in a typical numpy/scipy environment. + +Since September 2006, pyem has been included in the sandbox of `scipy`_. The sandbox +contains packages which are pending approval for main scipy; that means it +is not installed by default, and that you need to install scipy from source. +For the most up-to-date version of pyem, you need to download scipy from +subversion, which contains the development branch of scipy. + +To install pyem, you just need to edit (or create if it does not exist) +the file Lib/sandbox/enabled_packages.txt in the scipy sources, and +add one line with the name of the package (eg pyem). +Afterwards, you just need to install scipy normally as explained +`here `_. + +You can (and should) also test the pyem installation using the following: + +.. raw:: html + + {+mycoloring} + + from scipy.sandbox import pyem + pyem.test() + {-mycoloring} + +basic usage +============ + +Once you are inside a python interpreter, you can import pyem +using the following command: + +.. raw:: html + + {+mycoloring} + + from scipy.sandbox import pyem + {-mycoloring} + + +Creating, sampling and plotting a mixture +----------------------------------------- + +Importing pyem gives access to 3 classes: GM (for Gaussian Mixture), GMM +(Gaussian Mixture Model) and EM (for Expectation Maximization). The first +class, GM, can be used to create an artificial Gaussian mixture, sample it, +or plot it. The following example shows how to create +a 2-dimensional Gaussian mixture with 3 components, sample it and plot +its confidence ellipsoids with matplotlib: + +.. raw:: html + + {mycolorize;input/softwares/pyem/basic_example1.py} + +.. raw:: latex + + \input{basic_example1.tex} + +which plots this figure: + +.. image:: example1.png + :width: 500 + :height: 400 + +There are basically two ways to create a GM instance: either an empty one (eg +without means, weights and covariances) by giving the hyper parameters (dimension, +number of clusters, type of covariance matrices) at instantiation, or by +giving all parameters using the class method GM.fromvalues. The first method is +mostly useful as a container for learning. There are also methods to create +random (but valid !) parameters for a Gaussian Mixture: this can be done with +the class method GM.generate_params. + +Basic estimation of mixture parameters from data +------------------------------------------------ + +If you want to learn a Gaussian mixture from data with EM, you need to use two +classes from pyem: GMM and EM.
You first create a GMM object from a GM +instance; then you give the GMM object as a parameter to the EM class to +compute EM iterations; once EM has finished the computation, the GM +instance of the GMM contains the computed parameters. + +.. raw:: html + + {mycolorize;input/softwares/pyem/basic_example2.py} + +.. raw:: latex + + \input{basic_example2.tex} + +The GMM class does all the hard work for learning: it can compute the sufficient +statistics given the current state of the model, and update its parameters from +the sufficient statistics; the EM class uses a GMM instance to compute several +iterations. The idea is that you can implement a different mixture model and +use the same EM class if you want (there are several optimized models for GMM, +which are subclasses of GMM). + +Advanced topics +=============== + +Bayesian Information Criterion and automatic clustering +-------------------------------------------------------- + +The GMM class is also able to compute the Bayesian information criterion +(BIC), which can be used to assess the number of clusters in the data. +It was first suggested by Schwarz (see bibliography), who gave a Bayesian +argument for adopting the BIC. The BIC is derived from an approximation +of the integrated likelihood of the model, based on regularity assumptions. +In its usual (Schwarz) form it is the maximized log-likelihood penalized by +(p/2) * log(N), where p is the number of free parameters of the model and N +the number of samples, so the candidate model with the largest BIC is preferred. +The following code generates an artificial mixture of 4 clusters, runs +EM with models of 1 to 6 clusters, and prints which number of clusters +is the most likely according to the BIC: + +.. raw:: html + + {mycolorize;input/softwares/pyem/basic_example3.py} + +.. raw:: latex + + \input{basic_example3.tex} + +which plots this figure: + +.. image:: Bic_example.png + :width: 500 + :height: 400 + +The above example also shows that you can control the plotting +parameters by using the handles returned by the plot methods. This can be +useful for complex drawings. + +Examples +========= + +TODO. + +Using EM for clustering +----------------------- + +Using PyEM for supervised learning +---------------------------------- + +Note on performances +==================== + +Pyem is implemented in python (100% of the code has a python implementation), +but thanks to Moore's Law, it is reasonably fast, so it can be used for +problems other than toy problems. On my computer (Linux on a dual Xeon at 3.2 GHz, 2 GB of +RAM), running 10 iterations of the EM algorithm on 100 000 samples of dimension 15, +for a diagonal model with 30 components, takes around 1 minute and 15 seconds; +this makes the implementation usable for moderately complex problems such as +speaker recognition using MFCC. If this is too slow, there is a C +implementation of the Gaussian densities which can be enabled by commenting out +one line in pyem/gmm_em.py; this should give a speed-up of at least a factor of 2, +but it has not been tested much, so beware. + +Also, increasing the number of components and/or the dimension is much more +expensive than increasing the number of samples: a 5-dimensional model with 100 +components will be much slower to estimate from 500 samples than a 5-dimensional +model with 10 components from 5000 samples. This is because loops over dimensions/components +are in python, whereas loops over samples are in C (through numpy). I don't +think there is an easy fix for this problem. + +Full covariance models will be slow, because in this case you cannot avoid nested loops +in python without an insane amount of memory. A C implementation +may be added, but this is not my top priority; most of the time, you +should avoid full covariance models anyway.
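The timing figures quoted above are easy to re-check. What follows is a minimal, stand-alone sketch (not part of this commit) that assumes only the pyem API already used in the bundled examples (GM.gen_param, GM.fromvalues, sample, GMM, EM.train); absolute timings will of course depend on the machine and on the kmean initialization.

import time

from scipy.sandbox.pyem import GM, GMM, EM

d = 15          # dimension of each Gaussian, as in the benchmark above
k = 30          # number of components
nframes = 1e5   # 100 000 samples

# Build an artificial diagonal-covariance mixture and sample it.
w, mu, va = GM.gen_param(d, k, 'diag', spread = 1.5)
gm = GM.fromvalues(w, mu, va)
data = gm.sample(nframes)

# Learn it back with EM, timing 10 iterations (the threshold is kept tiny
# so that the iteration count, not the threshold, stops the training).
lgm = GM(d, k, 'diag')
gmm = GMM(lgm, 'kmean')
em = EM()

t0 = time.time()
em.train(data, gmm, maxiter = 10, thresh = 1e-10)
print "10 EM iterations took %.1f seconds" % (time.time() - t0)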
+Advanced topics
+===============
+
+Bayesian Information Criterion and automatic clustering
+-------------------------------------------------------
+
+The GMM class is also able to compute the Bayesian information criterion
+(BIC), which can be used to assess the number of clusters in the data.
+It was first suggested by Schwarz (see bibliography), who gave a Bayesian
+argument for adopting it. The BIC is derived from an approximation
+of the integrated likelihood of the model, based on regularity assumptions.
+The following code generates an artificial mixture of 4 clusters, runs
+EM with models of 1 to 6 clusters, and prints which number of clusters
+is the most likely according to the BIC:
+
+.. raw:: html
+
+    {mycolorize;input/softwares/pyem/basic_example3.py}
+
+.. raw:: latex
+
+    \input{basic_example3.tex}
+
+which plots this figure:
+
+.. image:: Bic_example.png
+    :width: 500
+    :height: 400
+
+The above example also shows that you can control the plotting
+parameters by using the handles returned by the plot methods. This can be
+useful for complex drawings.
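+For readers without access to the included example file, the loop described
+above could look roughly like the sketch below. The bic method and the other
+calls are assumptions based on the description in this section; the sign
+convention of the criterion (whether the largest or smallest value is best)
+depends on how pyem defines it, so check the docstring:
+
+.. raw:: html
+
+    {+mycoloring}
+
+    from scipy.sandbox import pyem
+
+    # artificial data drawn from a 4 component mixture (assumed API, see above)
+    w, mu, va = pyem.GM.generate_params(2, 4)
+    data = pyem.GM.fromvalues(w, mu, va).sample(1000)
+
+    for k in range(1, 7):
+        gmm = pyem.GMM(pyem.GM(2, k))    # assumed constructor signatures
+        pyem.EM().train(data, gmm)       # assumed method name
+        print k, gmm.bic(data)           # assumed method name
+    # pick the k whose BIC is best (largest or smallest depending on the sign
+    # convention used by pyem)
+    {-mycoloring}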
+Examples
+========
+
+TODO.
+
+Using EM for clustering
+-----------------------
+
+Using PyEM for supervised learning
+----------------------------------
+
+Note on performances
+====================
+
+Pyem is implemented in python (100% of the code has a python implementation),
+but thanks to Moore's law, it is reasonably fast, so that it can be used for
+problems other than toy problems. On my computer (linux on a dual Xeon 3.2 GHz,
+2 GB RAM), running 10 iterations of the EM algorithm on 100 000 samples of
+dimension 15, for a diagonal model with 30 components, takes around 1 minute
+and 15 seconds: this makes the implementation usable for moderately complex
+problems such as speaker recognition using MFCC. If this is too slow, there is
+a C implementation of the Gaussian densities which can be enabled by commenting
+out one line in pyem/gmm_em.py, and which should give a speed-up of a factor of
+2 at least; this has not been tested much, though, so beware.
+
+Also, increasing the number of components and/or the dimension is much more
+expensive than increasing the number of samples: a 5-dimensional model with 100
+components will be much slower to estimate from 500 samples than a
+5-dimensional model with 10 components from 5000 samples. This is because loops
+over dimensions/components are in python, whereas loops over samples are in C
+(through numpy). I don't think there is an easy fix to this problem.
+
+Full covariances will be slow, because you cannot avoid nested loops in python
+in this case without an insane amount of memory. A C implementation may be
+added, but this is not my top priority; most of the time, you should avoid
+full covariance models anyway.
+
+TODO
+====
+
+I believe the current API is simple and powerful enough, except
+maybe for plotting (if you think otherwise, I would be happy to hear
+your suggestions). I am now considering adding some more functionality
+to the toolbox:
+
+ - add simple methods for regularization of covariance matrices (easy)
+ - add a Bayesian prior (using the variational Bayes approximation) for
+   overfitting and model selection problems (not trivial, but doable)
+ - improve online EM
+
+Other things which are doable but which I do not intend to implement are:
+
+ - add other models (mixtures of multinomials: easy, simple HMMs: easy, other?)
+ - add a Bayesian prior using MCMC (hard, use PyMCMC for sampling?)
+
+Bibliography
+============
+
+TODO.

Added: trunk/Lib/sandbox/pyem/doc/tutorial.pdf
===================================================================
--- trunk/Lib/sandbox/pyem/doc/tutorial.pdf	2007-05-27 10:58:24 UTC (rev 3050)
+++ trunk/Lib/sandbox/pyem/doc/tutorial.pdf	2007-05-28 01:32:34 UTC (rev 3051)
+?^?Z????%d??W????|ooo]]?/^m?Z?????J?z?????B?h?b???B?Up??Q+??r@?tD ??k?????UU??J??C?T&/?As????s?-???B?&$??Y%?}+?, ?c???>???9???9??????w??-?=,????H????z??????? |U]?*;;_?????I???={?d?tJKK???:?.??U??????901-|4????Xj???8t?????x??>[?0?7o?dn?t!??t????????{?????8??k??~?z??a???????@r?W??JT>p??.m?@?W???h)**????????Z?F ???T?!?"??YS?o?X?????%???>33?>?L????y4 ~?&I???9K:?4?H?W?S?o????i??C?E?=y?D?{X?;#6-?+?\???????.????oib?h25????7#?R?`??A}????3????Kzz????????????;?g??????%K?Pm??m????????V??88?fff?&I?r4I??r(ZR&?2oeeu??=?sf?v?>444333++k?????IyKqg??|*[?????? ??,?J?? v?s???~>?!??#,-????\?????6?I??k%?+r????mk8?vQ???Za ??m???????j A???D ??e?((??-???=1?? ?R???T03\?t??I???$?r?b%?b?c? +*d?X?4????@?????W?F???k???$''KKzg$?|*C????7?W???_\?2n%9??%???= ???{?DUAd$;_a????_G?#???p?EM_?(`??d:EQ? +T??J??a|?????]?HT????/?l?J????G'??XWO????o???4????????lX??%??mj^/d?? :$???H? ?hhg???|??|%j??W ?+?+Py???W??_??3???????!???c???a?+?zII???????.,y?V?>/??4?@ ?????n???]??s]??;?z????_?d??U?????@???F?R@???????d?? ??*A?%f????t??}? u?j?D??r?e??-????m??ze???A??(XK?;C?F???;? +? +??9_?????????z?3y?????? +??d?U-?*+?????????? +??W2???????W????wN????Y??l?N???V +????V???b? ????.?A??L??<^?!r??n4??[_????????_?@? (?UN??rcK`"?E7w?? J?a?????0,???? ?.?^?<8??????? ?mQ?=???r __?ZP,_??sz}?Q??e5? +? +??Y E\?1_??s???????BOo??< +{???? +?EUB????????F$$???????;M???)(?x???#8??????j?(_ ???(_?~???!?????2???????oo??????&???iA?W????l?}&??W99?)Z?sRB?MLc?|p?;???/7?03?~"X????'?95!b??&i???o?f?4?0W?z?????? ?.??5?????????{<?z?????!?x???????O ??O[?Hyo?Lsc?????`PS??}pts?:????H?h@NabOF&?{?~$~E~?{?2r$?{?????RY?? ?h?q?)?'?_??4_? ??\?????bz????????C???-??hr??a?????S???2 ?Z??????????????s?g???G?????o??|0C?+???????r;u?t?W???? +????l? ? ?E??X???IOb[:4????]? ?]}q[?p?qdN?<*Q??-??S? L;????????7O99?L +?\?\g???????w?c?Lx???????tr?k)????????8????FE?????'1:r???U?"zL?_??G ??5???????????????????L?6xf?????B?Ll?f?t?Y????f?M???n?B?????w?Q_??6?/v:?`c????F!????,{?L<1{<??????0????`??^|?!???v???? r????X _6|e??D?????????????????d??B??2?Z}A?u???????|????|?? +????"???FwGd?????Z?????_? +???q?gvp6gF? +u??s?'?a8?Yy??x??DZ??:|JG???>;??qXxa??g???V??b1|????O.G]rb?,RZ?b?+??{???v;q21?sv??'??gg?vaad???Q?????d???S?O????Wr2?]????8????????%?f?/???N??? ??P?????L??r???+?P??? D;????'?????nnd?gH??'&O&????g&M"s?x??;1|8??B??????? 5 H?6kY ?o???Dl,??????M(ke?e???L??ld?????B{?*?W,?/??~??7??????&???O?S?7S>#j?????V??n?xu2zp ????i?||?????}Zz?????EhQ 2??|Q=J??????????2d???% ??C_???(d!Z?\q~O?9??rfLV????s??A?t???$?>`=<|??g?+QY:??8?|?*Y??~=YJfk[?^?C??M?J?u>???p??Q????'??'ac???DBQu?~???Gz??n??????q`??Z=??????m????"??e?vL#>?~Eo?^??1I?????)?,????~?O?CV?t?Fw?E-(?]?`]?0ea??v?A????qd?9??? +?.?O????8??]??G???Y_W???????[???b???-O6??1?,??? ?????IKN.Q ???o[nl14X?2=3l?J?_?~y?_}????J?a?????V???z?U???????g?V????????7Y)?|?dQYz*??9mA???nV?R??? ??????]?A??????5??>yB??M?e?x?);7+4??b?????/V?%????+??TUU???rrr?????,?3??rTr9??I??&?????; v?&???????l H)n??p3S??6?d??j?Q??l?3~???Z??>)??????/???]r3??c??a???Kg?l!?*(??{?TQ????E???e??F?DD]?%?V|??"x??JJ?W?[??~~?7U?]?y?X?8?????? q???????.5?N???^RKO..?Mk?,F????%???6~%V?&;*???mbRRD??????????????5????d???>%?\Q????+?Xh????^S???l:E?t?y?>]??{?Y?0y?c?k? w-4?? dNR??7o?hv?l;??G??????8?u??A?????????8S||?@??????$!7*Z???^A?v?O<}? $elL???u?6/???????4s???0???J??JYk_?!][:?@u?p??Z?r??u?????d???')?h??????)o???.???A9?+?>? VVd? W/_J2|+br??(?9|? ?? ?,?*??kw???Gyzk???? ??????dh???????,e???^?4qs?u2??L?M?L??????6(???|NQ?U#??n???????????jz?^3?6n?V?p~a??Y9:?8??????Z=????D"vd?6??????????=~z{t?? +?????^{=6j,9*??!??E??+??}?%??????oNXZ&?? 
????eb?T?+2?;:??p52??R?????81;?O?x?&9;X???ew?.?y,???????|????GIII?|u$????????g?n??!k????H?!QO???pF??-???????3??5??&?5h?B????#???^?"???kP3???IsQ??l????????j??p?fF?O??Jh???W??=?fbsr???Jf???W????:?1?\?|??;?`?? _G???SN at R?AS?[???r??O??m?w?????8t??2~????% }m +???\??X=!?-]???Dw???:m??&K? R#?d ?????UK?N4??4?,??G???????.??8??????????_??????L L8;???af<%??~?J??t|u??|r?[?V?|?dMF?????l???OO???r?`fn?Y?Y?wj?8???~???(?q??{???r??????n?XRZ=?1????WYX[?{7Y???x????M??~????Y????.??????^j??3???uf????k>? +Tm8??4|?_??rrr???8????????'???=?lt?????????????G??Z??5es???!;WI9e?M??)??j*?6?????Q?0Y?\))?w????`#?p????)?*?????`:???3/?c%???LX????RFnF????sX?J at v??u?Z??8?k??$??Y????D?xmc?????u??#G???_?d]~??5????_?8@?????? +O????| 'j??t? ????l????_i?CA??jk?i?T???nM??H??b???=?~k{???????_U???'R?/?pb?"U??O j????FL\????D? +?L???3>?????V???.2?.ik ??+?????:?]?.*7? 4V_?|N?=?`??wI7??@?W??????Z{y-?u?-[?????H4QI?aP?????x? +?%?L??????k~K5?????????KZ?(??T +?Y???#??R???9?pY ?);y???????[???? +?????%??H?????zf???k? ????????????J0j?@??????(r5??Y{????b???J?,X=??V???8W\v????@?????;(/??|C?&,8??v??????B?O?O??? 5??\Xr%???????F ?m[?R??w?4?)f??????J=J??*???????i ????I?@??n??W?3?r?+KK~n????L???????B*Q?c??1??/??N8???e?I????h?? %5?"???)?????)ZM(?..$n??/??X}H|?+? _??d????S????L?????rW??(Z?.TNEu!d?I?-0K??6??}???Z??????? -b???U? h?:i ?X?I?efY?o???;B??H]?J?O|U*/???QB????}{???~??r**???M?.|?Zg1}k?1?:?oM.??}??'???f??????E7?????j9r?E??Z??????LL? +|??]EEQ l???? H??~? ?=D???????hg**???3??j?jGhk??m???-? B???]5F???mF?b??LW\????????'????>|??#1l~?8C, ? ? b8??????q?'?A?e????U?P]????s#?w?Vo????8?s$?4D1NM?s?cII??`?%??B???n?+???j?@???/??=?q??X4b?[?b?"??@?xa?????u^???????A?????G??+??)?G?(WV??????ru?!2??? |Um??? b?b??S?;?HU?Z????T?J???w?VO???kh?o??????0?????????w??Z????d?????????z???(???1???K"??"???@?W???|??????%??z?n)?;[??? ?4A?.?M?LD?v.?d??4?U???i?,&'????e?????????-oT????at3v]}uuI?122??G??? _a`?U?????W?;?g3??'?? ??#6P??~4??????f??S?/gjk?D????F:%?|?rA?}?O?h?,?d*S??o??r~ <4??*UU?+??o??3b?Dz???uf9?3????/K??????U??????z?/$??Z?m????"??HP?\Cz X???J?Dc?????K?R????0????\^s:?t??f_??????P??+?+?u?g?>/#?c%??c??Fy J3?c?d?{???o^/A ??????? td???;w??{?x?????^1)1?? ?sr;;?+??o??}f?????~M%?f?$??m?.??U??r?b???\?~?[?i?K???$y?A>r3di??U??????t?V?K??F?? |??-??????????7~U???E,X???Z??i???x???_?{ ?A?>?R???a?1?'?@?????23+???5w??la?J??V?6????LQ????>_U?????? ~E?5????|2?~???^??D???d???S???y??G"?Hc?9:>m?h??\4L)e?n?.?F]j?;?N?X??W??@e??E 7 ??'?~?U3?={????a?????m??Y?dI.F???????KK2??/???b??q?^?M??0# .??lT?W???? |o? ??&v?9??_k??b?\?- at m??n?????]?}??O????3'=$n??#?,?????-_?o??T I?@ ?w k?&*?h?VO?ak?x???c???v?^?Z?*??????i ??w>?????{C??z?E????&???J??+??O~????[7??!?b???;4Tq? ????-&7$?_???;b???b?"n??[??JK#?T?n,?W ????i????Wx{Qb?*j??3??&?s??t?????^????????H'#?3???????????n?(?-????u'2?)?Wj}V??$n?r?a?o????? W7???Y;) ??B ???*?[6? ?nH`b?????????????Cb?????G?=02?~h??sW$?,?????A??S????v??0??M?[W????+W???[}???????+[?%???? +?Tpp???W????'?&6hq?^?(? G???P????-5j?????|???u???????M??C6R????O??q?uVR?VLg?????i7]???%`????7VKO[i??i??U????????<n?N????I,V/?????)z?#?'???~?????L-H?Wx?t??A?O-?FOg~v>?\????.(??]?c? Y(?\p~???????x?t???n?j????_Jw'?7j??J?r??W???c?Zsy???S????1?????Tf?2??j????Dw3 6?#+??Y*?b?z&z??L??=?3?:?????om????G??? ?z??T??9???? +a?NwvC??[p?^??'??%^??;?d??? +g?m?d?fY??!???:?_???C??W?^???CBBs%?W???.*'?'_??dP???;y??j??Z?fPe?B???]?jAjFt??"?1 2??S????L?????g5?tG?9????T?bv?yB&OVO+?}diL???7?????????Sd?k????`#?X????)?*???????f{??k?Fhdg?P?id??????????????? ????K??\|????-??]tj 9[????#?6zN??-????????]??`??p3`??a?%W?_????? 
XTe????Q?Y?/??????????lAvp??q?-?\>5ME#?7 at qCq ??MSCqODAAQ??y=8??0*???????sf???9????}?7t???P??W?Lee??????Yff???=???e???43?z g?????????k?+itB??eTu?? +6?U?.???$????}????u??tYX???{?9???Gu?x?yg???r?????Rn?????1?\??????=V}T??7/W?$?????x#?"?b_?>??z???K???? +???n????&??_???????6.??.???R9+?0?R??-??k?8????????????????n?????j'?????&C:7u?~#????Fi K )???ob??D?k???VV??J"m??W*???1~<?zh? ]#L%?m???????C??z????p9Z?WI?j?:?????? +??"??Z??\ D$I ??q?????|F??J?>?0?%?H;?C?????w8?`?7??s?f^^?o??e?t|??????^?=7/?z?? ??????@n?^E?rI???M??,??{????Q??G?????cuo^6???????????3?RS???????H??i??-o?????O??g &Mz[?????B???9Xo???n???GR}?_]_X??W???$??????M??U?1i1N?????^?C??g??????&W?W???+????v?fUl?(,?? l????p^??A????RG??????C`????|@_?\Vx,]?????Y7?OH???o?:6,?:o4`=)fJ?????4.???M=0??U??@????3? +>`??p????A???????76?TJ??R?? |??K??3s?K???WV?P???.94$$?v??C3?i,?[??w????%??guL?%???I????w??}9Z?hW???????\@??@?W&?8O[Q3????ss?Q????8x??????9w?D$Q?/???oS??h????\??-U??]??????????:SK+JC/????9nu\??*?kK???? ????w????T?u????M|?>l??#G????????iS} ?m)j~?????x??%[?oU??G2??|?'??2{.R5????{??k???s?:?R????i????????7Y?????d?D???7}??n??i?_ YjQq?y?y???? +v?>sF????K??O???d?P~???+i??^????: n??}????=?p???W?d#?*?&?????=??g{?^[z???O???????3????Y?r????????????+?@????????Ovt????q?+?????6???,?[J???????u??v???C? 9?}????^??V?b8CN?Z???.?|?5?1\??]:??v-???u~??;i+?-qY??????0?????]?O?j?c???e???????~?=E??WSz?$F?1j??????V??_?a6Cf?^II??????????c?v?????M5?pAuV?A?y??7??]????U?+j?"T?|?bZ?.j?+???<[?- ?,??(???p??e^;??B?????-?{iofa?/?:?n???}Z]w? +o??????3b???????????le[OOV?L?4*?L???WDd$?????l?MV???1?E????jW??????gL?~??E?}s?????A?Q?'>)?wtT1B?P_?b?? ??n???6+jr????[)J?iD?7????K?h????????R8"O?j???,?^?2)gg6F ??_5???????N??]E??s??A?????????J3S???^??'??????#?UL????]\?p??s?`X=[T??{????B??r-????????]f??????=?U{??h?Q???GG????=\v??84c??k??$?'??????&?^???K?X??v??N;u??0*r??#?GT??z???|??W???X?? ??g u?_??F?1s&[????/??????????k???ee**?u`???-[7???9 ????%5????ykkV^??x?_m???+, at 6b?Yik?_UVU????O??x?d?X&?;w??K"?P.h??_?8??;??kiPn?gOW?^??????`??????}???#?~???l??? K???}?7?c? *?T??:????????]+N??z`?PS???Qxx?{??9???s?]||19??3d?6$lXwv9?????)fR?v`???1? +?:x???c_z??KO.????v??M=Lq?+W?m???y{{v/??_???????&e?Wd?????3? ?C??>(?;|??I?u???N??#???~?????7??????????I???B?#?????? ?_??V??:??[??k??#FTP???/b? +?xZ?KRRR????M??u?????{[????????e#'???????q??;Z??9?`??b?<?????e|z??yGqv-mz???????j&b?)??????bo??6????F???EeE?????J9}?????\9u9??%??????R?K?h\f???3???Yw?n?#????[E??e 3?t??jB?68?M??8???_?????ph???????>9???7g??u???+??R}?D/?5???A?f?S????llJ?`/+|p??s#g? ??~????z.?|Kj* h?BR2H? ?%K??????????rSo???????$????Q?W????N?[?r?mI????? ??(o"?H???? ???f ?.(?r?]??W?l?d2h???f???|n?y?y????b?825?6?v?????K????{??????so??6DUm?\ ?????5?????b?~}????V?.f??i6??q?rk??????5>~p?V=5x0 ????Y\???>z)???? +?ka?J:?B?;???????N??v[?hp?? ?0??1?e?????????l??r??????uY?ect?????~?ovG??????C&??e?p??]??|?BV?6n?h?????.?tt?????w??b????f?~?? ?m??J&N??33q??W@a???&55U"?ddd8;;{?T?????????W????~???V?$????Nw*?????^6|?{n???UH?\)??????????2?W?z?Ut??;?2?:??????g?H?HR???w??}?YE?9DE?/%???tm?T??????t?zi)???[???n??:???0??\?Z?W,q?S??VYIySXr??A??? ??yh??????z?e???;6??????????-?? ??8q?A??dV????O9????VVV-Z?044l??????KJJ????8#d?W?C??\?m?"_Hr%?KV??????HV??,@???ay?U??l\?????G?*??? +)????????6?5?Y2?"W?`k ?tr?f????=???;?/?????5e(E??a????????z????,?A??|???l?E????a???_gE??????M%@???:/?~??H?o?I\?m9???&\?|?+???UMj4???J?M??r?aKN??v??>?????cPrG?T/??Z???(???8???D?^?' ??+???Ay?????%0?)???????J????y?66l?H]?Yu??2??????????m???IU/?? |????~??%+*??o???4?uv?d?ummY??H?EM? +?????]???t????f(T=??????QY?9O??W?F"aSkW?`#????c??A??????7?k?????????;#?+????????/,TG?>???W?_?????????????eKh??1?:?????u?_?SYY??g?? 
??_???J????? +4<|||??????+?P??W?_?z????m??????s?Z ?W?J????_A?? +<' + ??k??\?Uo??:s?????+???? A?_????????????R=??^7?? t?_??.??gT=??????o??v?Z?h?`?W??d????.?+?????????x"8LLL8E??+:=?j?????rX???V????iDV2$"??+??/?+????z?+??????A?~?bEW?&???6??W:9~U/??????? +?z|P???2W?o????x?????nT??-.?? /:??T?????????q/???R?^?_A?? +?_=5O???^???????W?i???????W??[?J??T??????Y???J?T????W?W?_?+?? +?_??*?????bg??_j??W*??E?SY?T???J??j?_999i????~q???_)?'-?+??????z?+?+?+DZ?? +? +??~??@}???#G?l?R$u??-$$? +????L??WP=? ?%??;6;;[,?9s?k??AAA?W??????+e??????_?+P??;w??w?? ?D?????8?Z??F??~q?5??!?????)T?c???z?+??R8UP??Y###??"??????o?????/?MP_?;?Co?Vvv????????~[?cII?Y@=A?j?????I???Y[[???t?Dr???????X???S??~;;?-Z?????????G???MMM ??i3`?????r????}??j??????w??;w?X,?????a}j??+::??????S?NQQQ>4?I???j?|????????v(>T?C?@??o??&99?????????w??Mc????HII???HKK???5j??>??E???o?]$$$?????.++????????:?I???j?|???133k,?V[ +????z???*##C?cII?????v???%???????7????2''G3????Z?z??GzN??N~R???:???N????sc?????V?"???Y????W????kyyy#??Z>T?C?@?P??????????w}????;&$$h?S???-F |??y??????????>T?C?@????T???????'????????"??????k????H?5Y???H??oV??s#?d?x??z??z??lllRSS%IFF???????fv???kmm={?l????R???n??m\\\yy???G??&gjk??j?????????VT?C?@O??????j?????a???'O???V??[???????K??S?NZ?4??#??oV??s??S???C?P=???}? ? +endstream +endobj +190 0 obj << +/Type /XObject +/Subtype /Image +/Width 800 +/Height 600 +/BitsPerComponent 8 +/ColorSpace /DeviceGray +/Length 955 +/Filter /FlateDecode +>> +stream +x???! ???????&ku +endstream +endobj +186 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 370.465 151.689 385.196] +/Subtype /Link +/A << /S /GoTo /D (id7) >> +>> endobj +187 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 317.397 220.263 330.006] +/Subtype /Link +/A << /S /GoTo /D (id8) >> +>> endobj +188 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 296.414 285.466 309.022] +/Subtype /Link +/A << /S /GoTo /D (id9) >> +>> endobj +189 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 270.981 236.273 285.712] +/Subtype /Link +/A << /S /GoTo /D (id10) >> +>> endobj +185 0 obj << +/D [183 0 R /XYZ 89.291 778.025 null] +>> endobj +30 0 obj << +/D [183 0 R /XYZ 89.291 392.177 null] +>> endobj +124 0 obj << +/D [183 0 R /XYZ 89.291 392.177 null] +>> endobj +127 0 obj << +/D [183 0 R /XYZ 89.291 360.672 null] +>> endobj +34 0 obj << +/D [183 0 R /XYZ 89.291 334.661 null] +>> endobj +126 0 obj << +/D [183 0 R /XYZ 89.291 334.661 null] +>> endobj +129 0 obj << +/D [183 0 R /XYZ 89.291 311.357 null] +>> endobj +38 0 obj << +/D [183 0 R /XYZ 89.291 313.677 null] +>> endobj +128 0 obj << +/D [183 0 R /XYZ 89.291 313.677 null] +>> endobj +131 0 obj << +/D [183 0 R /XYZ 89.291 290.374 null] +>> endobj +42 0 obj << +/D [183 0 R /XYZ 89.291 292.693 null] +>> endobj +130 0 obj << +/D [183 0 R /XYZ 89.291 292.693 null] +>> endobj +133 0 obj << +/D [183 0 R /XYZ 89.291 261.188 null] +>> endobj +182 0 obj << +/Font << /F15 80 0 R /F46 71 0 R >> +/XObject << /Im2 176 0 R >> +/ProcSet [ /PDF /Text /ImageC ] +>> endobj +193 0 obj << +/Length 1942 +/Filter /FlateDecode +>> +stream +x???r?6??????L???:Y?d&u:?-????ej??h?v??o?>u22????o`?Z???? f?e??a???+?????VF0b????`=??Mt??<^o= +o?W/??dm?*t?>?t\?Teq?>? .6a?e??? ?e??I?5?????i?-????D??????i??C%y?9WC*,)TK?r?eK=?s?????T??ZZI3h$????^rB?? +?w????Ir???r?I G <#|b:{??/??$?????w\ #ysM????k???(??5@G???5C=[Q?n?8???,?BI????$e??p|Cv?=??L|?? 
/??w/z?]?!??GW???d&%????,?????c ?Y c???r0????;=??ra??s?"G??!?Iw???b?kA???`????????^???S?????K?}???g?+???????e??q?x??pf?zQ +i-M?-????4W??x?4??)??????????EQ????5??{????`??1??t????X???Ua:?(A1dj?k???-? +endstream +endobj +192 0 obj << +/Type /Page +/Contents 193 0 R +/Resources 191 0 R +/MediaBox [0 0 595.276 841.89] +/Parent 181 0 R +/Annots [ 195 0 R 196 0 R ] +>> endobj +195 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 589.452 134.976 601.401] +/Subtype /Link +/A << /S /GoTo /D (id11) >> +>> endobj +196 0 obj << +/Type /Annot +/Border[0 0 0]/H/I/C[1 0 0] +/Rect [88.295 358.058 171.745 372.79] +/Subtype /Link +/A << /S /GoTo /D (id12) >> +>> endobj +194 0 obj << +/D [192 0 R /XYZ 89.291 778.025 null] +>> endobj +46 0 obj << +/D [192 0 R /XYZ 89.291 608.381 null] +>> endobj +132 0 obj << +/D [192 0 R /XYZ 89.291 608.381 null] +>> endobj +135 0 obj << +/D [192 0 R /XYZ 89.291 579.66 null] +>> endobj +50 0 obj << +/D [192 0 R /XYZ 89.291 379.77 null] +>> endobj +134 0 obj << +/D [192 0 R /XYZ 89.291 379.77 null] +>> endobj +197 0 obj << +/D [192 0 R /XYZ 89.291 348.265 null] +>> endobj +191 0 obj << +/Font << /F15 80 0 R /F46 71 0 R /F34 140 0 R /F57 143 0 R >> +/ProcSet [ /PDF /Text ] +>> endobj +114 0 obj +[54 0 R /Fit] +endobj +153 0 obj << +/Length1 1743 +/Length2 10732 +/Length3 532 +/Length 11731 +/Filter /FlateDecode +>> +stream +x???U\??????? ????????????5??@pn ? ? A???9;9g.??j~????j???ZUo5??j?,?N@????; +? @JY^K??????BK+? +2w?ur|g?pp$An~AH@????????@/??O@??jka?P6w?9@??0?h:Y???}X????ph??@?? KV????;??uDa?GI??? ??????? y?\? Rz?$?h??h??Y???8Aj? &?H???e?????+?? ???CQ???T?C??JR]?A???!Hu???????T??C???C? +:2??r??]?-? ??v????G@"?9?nN?:? ??????? -?C?q?????? !??? !?!da6!D??/?,??/?8??B???_?r???7???/?X9??+??b??B?\??N?_?r? !V!???/?Xy?AN???_??? !V????????t???????? r? g?7???Yx?B^p??@???b+[? y?,PV??,????:"????j???r???z#?r?XO6?p\??j????#??lk?2???,?x??;$?G?4?"??G??Jr5/,N?L`??F1 +bW(???0]?R?ny??7*???+%>??????r??^A???>?D1.????9?J?|?E?@?B?,?hd)??Y?qk??.? ?)?=En????M??4?,Q?????????+u??f?? u+?]_*-?2?l&?47)ub)n7(??>?h?2rV????????q? ??Y??????k59?v????SZ#:m'??D?E?!I?%~????W?;??3??_0Y?4?!??l?&? ????<e??Z??2???9.?&?[U~???4??"0?^?GSs??????????,{j?.??G?Oyc?? OF?Xsn????O?v|0;^??[??5;??#&??Oe???T ?F`??7????3n@???Jq?:^?????Ar??,rqZ????!4'KG?a?(?1R1?????[5???V#T??)'?? ?@??X?????1?p/]?MR????w?9?a|??????F?????[??S$H$N??V???k?0?d??k(]???m|v??? +{?U~?0?j?V???q??IE2???:?b????5??*R`8#* ?w??k?O?}??A??F?????4?\???OV?[,??Qdd?o?#?7???V??!o3??????Zf?N????J?s=?$6????&1^?Qr?k????????.T?*g?<???P?hIR'5U??9|????s?Ys??c??`?IX_?p??????Z?G?.\?hl?F?of??`LC]??rd?O|Cs*_?f?[R?L?????????????w?_?=Z0?@Sc1?O9?????o???????/??ShN???????? 0?\?/???????????VL??P??P?+????k????rb?rn?{~t hJ?oy??S?  ?q="yL?????m,???b???v?B????2?`??z????9???<o???V?????n~#3 ?=?{???^??h????>Y??ZU?JFz????-?1??????w/?????1f??`?>?=?K?01??7o????w=?",*??d??\r??c+??E??7??O?W/??k?7?,? ?K???=????????*??#?????v'a? +?????Vb??:??f?J??c??.I????? ?%+?? '3}??????;)?;?3'??.?????~?????]?????p?_7? -*???????5e?u?-????6}%?,E???????R??N#dv????v?.T???X?e??????)??b??????) 1E?5??d`????B4??L22F P???yv?????U??K?????{?%bDdsq?[??w????????6n8W????b??????v???@%,~J??.fI??;}or???U0"? `???????d?????T???v)?-p??j???????/?U?G? ?&??+?%??????]?????y4??e?? XD?q4??4?!?&n???#|[?????????????z???o???V?.4?BH+??pq???? 7?@?T?6;c t??????? 8\O? Dw??9??%???{??~(?? ??S???O?W??? +??A?2b?oI???_{?)??Vb?'h?/??c???p?????/Dz +y1? ?b7??????k???8??#???l?X}P???ZUB??W????0?(?1??&?q?O?J? ?"??w?6JH??? ??????o???2+0)?J?WD"U??'?fD?)??K????-??y 0???5]z??F????t?2??????(B??~y^???????E?|??????@?-Z?[;U?d?y?c????q6w ??`??? 
??}??R2??K?O|?}.>???????C??????!8.K,!???]vQ???$P??????J?? ?1????q??????????ga?=]?Rf?N +???R??????39?? ?K????"4j?-??0}????4? +?T?????L???\???0???k%???H??U???[??bk????OvEL?`? +1 ?`?^??o??t??????n.?K?Rb0?s?y"???N-??;??g?bu5?"??????D|??????U? ?8?7??G?c????'??9?S'g????CF;????????%??????8:??????-????6??8?E?pt?w???F[??^*?d???jI?8?X?M??| Z!e? g?K???%????[?K?m? ??& ?????s?????'1?!???d?Y?5GB?~??P%M?`{?Ys?WC???/H?}B?b?5?????C^?Q?0??M??????Y?????? ??:????'?\?/\????zS1?Z&[?k???"?gg6?t?a@?BI43?????da??*an?H?S ??????Qb)Y0X_6M9-??g"?va?&?B?A??H?(j?M?b???J??m?3???2?? ???v?zR ?_??E?7?Nb??E?a???sa????T[?{?W?'??x\?l5d?{?|?sJ??p???^o?N7??X??X???;??t?a:>? +?}?=??????????s???H??z?m???????|???S??j?RW9>??}g;?? ??G??u??P?i? 5Z?sQ???f???????d?9?????9h???V????6mv?-?Pe??)???[0?=q???G??b|?d?e?????i???6Py????@?f\??????[ ?D7?=?(O?????|i???d6?????9\?c??????~???T??T6???p`??("???o????????=??X???=??&????}eZ ??EF?K?6?P?C???v"??#5w?)????n.?v?&??l y?p\?2?Y?h,C?B#g6JG?8lFV????g{9)e?z {8??z?y?e???~?4?~M7?????C(?zO4?V??Y?ok???_?a?&}??g>t?&M?o?H??'? o?7?)?:?+4????- ???S?????|?XK"?? E??7tt?? +0uT??e?q;?`??????F?? u?`1OC~v?K?9???ge^2??U???Av|??%o6???]?'J?[ a??O??v??y?l?1?#???r?aE(j? +1????G?k2???b???W????K?1! ????Z?6????g?X?L?U???#|w???&P+?lc$??}?#A?????sM???t????jU??(?*I?N??H?e???K|???['? ?>R????S)? Y-?Z?IgH?i?XP/??5??????1B(?!Zb?k??A??????]5??????$pqk?I??X??6G?q?|?????????????p?/e p?]?-?K^3??c5#x?k??( ?u vW??z?Mze?BG:o?3u%/z???????????W???*{b???(?T)???]?zM???Yj3??T?xb??pC*??q??)^??_??~a???z -??Ky5?f?&L????=??D?W~MV?z??`k?32C???g?;D?x??G:=GOjzS`N?@Fh?@WR;?R??xd??-??J?"F????)????a???????U??sh ??E5?U?%S$???e??????? ?r??7?? e?????< +p???[S{?@_2???!?3 N? ?OSV&?Z????_p*3JM?????U?N???????J????e????*4??Ky?2D>??v ?Ord!Ei????(]?4????????3???8??bNK2??)?_q??%????)?2^?Y??????w?%(@?G???>?/x+AD?A%y????.???,?.[x??`??9??1?J??Jc?????'?Lk?vl???]??????!1_;g??????k6??zfh??oE?E?9R??=? +1??r????r? ? ??u??L??}?5j?w ?blY???x?p?gdN???????W??#Q??e???#Q????'4-?&??????????7Z~?L6Y?u???%????JB5?%F???^??/LS??ts B \?W?4??????_m?,?-a???????X??f'????X4&T^?:=K?5???9??E?r?m ??;O???'?X}ul?&d? +w??W??f?p?y,d????# tW,? ?? ?k!???p???X=V??+;y^??nZ|?9??~??}??X= ?d?sX???Zg>d?????B?y{??%ku??\??,&??V,)?N?I??H??f? -????Hx??:????&Y?;???vX???(6qVzH$IfP1L[L?,+$?????N?c????J?R?6? %?N@ %?????G??yq?Q??EE?LmX??w???n?uoK??W1????A?m??)[|?#~ ???#??fo'1e?Z?c????p??????????&K ?3?nG)$d ?I9?K?1(:???/ +4o?||?*A? ?????>??f;??V{??q#?\k-udC\??M????2???Q???0??&?`???_0????_?? ??1h?+ ??0~?:&????9FK[?+N3}]??lhu?"??!??????CG??aR3E}??x?a?b?2??????s??y?4A??K??0bcx1???}N1??4?g??nD???????v????w??&u?h?#?s??*Od3????U?N??D?????r(Sa?g?W???? Bm?/z at q*g?{?d?b?????(?i`/~???9%, ???@PD???w????;????L?nIfFH??X?o? 0??G?*??y??O'm?/W?????I??=??>?&?>???m8?b???????*t.???<#@[?UA?A?]?f??? K*????T?k>}W?k.??? +??y5?BE?Jpr??5?+n?X;?K!-?s?vg???j63???????{????M *I??/s?H?????/p??H?*???\???????y??(??.?M; H?U?*??|??r*1??1?X?8@?}????j?^G Q???*?????+??L???o??????g&%?Q.3??6?6.????!?|??~??ZH?z??>?uS?rD44????????;?5#??G?9b??^?|?~qjk??????`#????oS?%??/?n??*??7Uk?????(??2u|??V?????????? ??,??^O?Tv]??????N???J0 ???L??????$(???!?V ???E%S???????/???9????????qq??uyfA?#??A?M???K0e????F?Q?@Z?mT0f?Ra??bI??????????P1?:Rz2??_e?5?WiaD ???????um?=??????C&??a??G???#?Z?@??jD???nu#???Z???????mg!`??'?'o?2( ????.?Ng??\f?#??}?+Xn????m +?4f?d??c??R??N?b@???*?o??d?P????;?O??^????xI??Xp9?'?Y??'?#??z?r??i?=V7n??p]?Q???`L????5z????9T????7???Z????=?D~? ?L??!??p??bL????mTL??*5:?}?????X?{???Q????C??M??=?1\???[?/?x??`?(924?R??n?)??\????@?r??????[??2`?{???? ?l ?wk?{??????cT???a??Jh2/???f0???????3?Fd??????n?????P????\M???m???? +?K?-?qX??}u??rJ_??? 
/?D??S??\?NU?!az????3v???}???????F||?=O?L???m?i???9?ZZT??????rS???p????? ??+6??P?a???G:??%??WAy?|?+iz,a?H???op?\ f6??S!V??~7g?R??je??8????Z? ??K????{-;s?rT????/+[???4?^??^?k&?N??F?*???j?hX$?i?z???? ??????o?e????rR?/??o?? +D? ]??$ym??q??;?T;???j?????W*o????????=?N???B??)?q?l??@?????Ip)?NS?_?????N:??|???~z?% ???S=????b??????1 ?WF?S_???ePo?|,&?S(K,??|?f???| 7M?>7?3S ??l????t????2??~}?4?-?|_?????5}@?M?h????[?m?Ms????]???/??i??&?1????<5??g,cGFf????&N??? ?>u?&i?l{???????3L?? ?????E???p?????z9??????7??F?E3x??*??Sh?j ? ?e??^?qK?7?q?$?k?????Yq????tE?2"?Qu{q???g????,?u????O_?????q6Z$?G??[cn????????pI???o??? +H?X?z8?????/&??D?M??8????iyX???mA?c?\w??YE?v??zT[?????i???:?? /-????R=/i?:e?T?k????????> endobj +152 0 obj << +/Ascent 611 +/CapHeight 611 +/Descent -222 +/FontName /DCUWWE+CMITT10 +/ItalicAngle -14.04 +/StemV 69 +/XHeight 431 +/FontBBox [11 -233 669 696] +/Flags 4 +/CharSet (/numbersign/percent/parenleft/parenright/asterisk/plus/comma/hyphen/period/zero/one/two/five/seven/colon/equal/B/C/D/E/F/G/H/I/K/L/M/N/O/P/S/T/V/Y/bracketleft/bracketright/underscore/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z) +/FontFile 153 0 R +>> endobj +199 0 obj +[525 0 525 0 0 525 525 525 525 525 525 525 0 525 525 525 0 0 525 0 525 0 0 525 0 0 525 0 0 0 0 525 525 525 525 525 525 525 525 0 525 525 525 525 525 525 0 0 525 525 0 525 0 0 525 0 525 0 525 0 525 0 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 525 ] +endobj +198 0 obj << +/Type /Encoding +/Differences [ 0 /.notdef 35/numbersign 36/.notdef 37/percent 38/.notdef 40/parenleft/parenright/asterisk/plus/comma/hyphen/period 47/.notdef 48/zero/one/two 51/.notdef 53/five 54/.notdef 55/seven 56/.notdef 58/colon 59/.notdef 61/equal 62/.notdef 66/B/C/D/E/F/G/H/I 74/.notdef 75/K/L/M/N/O/P 81/.notdef 83/S/T 85/.notdef 86/V 87/.notdef 89/Y 90/.notdef 91/bracketleft 92/.notdef 93/bracketright 94/.notdef 95/underscore 96/.notdef 97/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/q/r/s/t/u/v/w/x/y/z 123/.notdef] +>> endobj +150 0 obj << +/Length1 1641 +/Length2 9411 +/Length3 532 +/Length 10365 +/Filter /FlateDecode +>> +stream +x???U\\???I????8?????B X????;A????Cp ??? L?s?=???????????U????z??$UT?2?1??X;?330?D?TU??? LL????" ??????????????X8?L??a?|"????eO?%??:2G X5?VI??,9??????AKz`????MQ/???\????]Z??4??5V>??uj?,?; s????.?Ivi???e[??C??u?Uxx&wA?$??(??Ba?h? ??4??zE???3ke|nUC?%FC6rt?xT?Mm??H???????(???o3Wd{???f-z???i??????1^Z??UFI?o?#?JRzv`??}X6BF??e??I&?NL???#?F??/??!1c??????8??A???????z?Z?t?u:????LM?=v;j?J??T^????N?k????D?a??C#???;???Lc?A; ????V+o)??T ?????X ??"?1?t???t?!y???BEe&@R???G??jo??? +?g?b??;????????h???i??h?s-J:*?????6??Q?%???;?????3???????YAI?^?/?X???Cb?E??????Q??? ???(?@,?JV??1?_??U?????%??U?}/??Uu?9??3?4y?VK??D?? ?[>99? ???????&&v??]????f??J?}??|??????+f%????m?f???1?8?w??n`?#K?Y???U[??e?QD?w??Dn?^D2HO ???g??}:???E3W?%?$?+1??,?\?~???[q??????r??????T???)????7Zt???D_?[?? ??F???f?~??s??#??/$????q??W????7?]#N????E_?H?\:` + }qoC??????s??? ?*?????)???sg? +i????j??uL?A0U<5?>A???Rz?????t????O?fx!??&???qS?????X?J??vhMt<?C?ds?I?YS????0 ??????:y1?mX6?7?;^?zY}t-?e?CI???g??? ?o????}?|?????}?W??????+v?1?X?P?G??~???=|?Q??8??,??????A?????KO?????z?t<??T=?.????$h?k?jo?EL p????5?1k??UqP]??7???iBJ??Xj?#}???=? ?Lf$?c??S?? H???P??????A\tgo$q ?5??u;??? /??e? e???????V??5*??,????:??/\??TP????7~ +??q/??cSif?M?|wS??Sr?}6w +`*h??j?h??5~?V;?L????-j?|h?$???|? Vjf;\Pnj????9i??1??uY?{?*`$??G?at8`Y??8?c?A?=?c?????)??x????? 
?9????U???P?}?P?,j????o?????!?i??%?8?X??E???t???Ll4?pIq?S,????q?B??(??M?l??{6???1(6{?9I?????????3?Q?P???????v??=Hp??rJ?#??)?e?u??c{??????O?>k?=V?????:B?U??B#????$??#????5?2?V?9?W?}6?4 +K????W?[??:????k?~3??U?????$???"O7 P?{ ??N??HF?Ix???r?M?V_?q??K!F8K?h0??|?ODJ?r?ik??]???u?y7R 1|3k??]?X??$???L?(??u?g,?J^??&r????Z  ???.)j\?A;=??^?;a2gZ6?/???????]v?8g?* d??Y}?&???@????5:?*??6A??(?s?? j?????V??1?aPH 8x?pc?yp^?[D.?&?D???^[????3?M?Z?`"?????"H??so? S??2Mg?{jQYt???EW3H}??????? ????=?X nr8?>62????h?. +a???tB'????? ??;^ "?~Q?B?k&3?V?H?'h ??}??'??@? ???R??? ?~d'?q???3Vx??z}?/?^{???2?@~EM?X?!9?o7)??<#?*??x??u??'X]????&??Pd!A??????"s?H/5? w?H>???3?g}?C????a9?|????p????????=?L?1wu?b?@?h?g?6?w6A{??*??|??2?o~F??`]??#rW7???K?GM?? ?h????i?S?????3?T???k?GO???P?A?qgqK?^*l??G?/&??}????% ?i -e`G????? :)?%?? +CF?2J?_???????t?t?T?????????8N?????:??,z?p???E?? ??@,N??ZH??O,?cz???z???????x3r??,%!T??S??%xt?W???? ???>??S7?_ ??B=??~!W????F?5j(???.S6?-??1??{??}?K??????3?Ay`k??+}E`P4 ?T?A?C?n?k?&7b?qd?m?W???+gy??g??:??iy????Z#?Qg?7R?F?g?!????)?&On??3I_???????cK??????o??I'?3?Wo?????%!{6?cy???s????e??E>?$??p??jD?b??S??S???[ ?(????^ ??1??V?????B?}&T1KL ??R??????Y????O??YS3k*?^!/???????A?-^M???' ???(??i??I>U???????7??0?????a?-?U?"?? +ml????i= ??g?q%U????u?3??S?u@??|?6Jv?,t?[Szxfbth???C??/???????)?K????????4s;???8ML?8???P"c?????Z????.??G????*?h???6x}(~B?>?q-?,mn??.? +p???:???Z/TH8??e?0t ?Xx7?? ????C??,$?LXl9RFV??/DW#?? +?y??1?YP?y??=??M'Rc??h??w???W?A????@?@wG8??g???N?%?T?5`?;?*????"[???AZ?x???w?CE????l9??x????'BKn?! "?????@?_?????????}r????????95??Y +?;]X`??G??1??C?R;?gZ?=f????8 ?X P?O?~R????xiM??+o???z?>??6? ?^????5???6???e%y???? ? ??R??Gsf?i??????`?q?c?W?/vY?b?)jZ&?_W ???^??hkw?tGQ? ??I?f??8??*??.)3?/?W?YS{??=?T???? ?p???xX??1??E?a?&K7??O????^?gwd0K????p?f?j](?g??????i???u*??????v??Um?fQn)5R~X,$??mg?F?????V?_[t?5?]??|.????(??++m [?h????????\[??j=?v?B?Z??????x?????|R*"???Qe?&3U F?5-????????!?DU??*6 ????z7#Y? ?On?WD?y??/????k?!?v \????X ????o?;B?eK?9$*=?4?U????7g??R??$Q??gH??e?w?d?? ?S??D?i???O?L? ?_ ??n@ +I???????.z?R????z? d?.?4T{W?:DR??g +??????}??H?W{?Jy?}??|qOU?(?Y???~Y??6,???? bY?u=???X?_?M??????}?~ /???? ??? ^C??????-??EO?V????????=???$o?}#?????_???&??"???????D[?a? +Hd???s{j??a??I?S???? ????"?ZT?G?1y^?????m?.B?(? +?iKF?X??YfB5?|?????wv???.??4??GX?M6???w?.??<$?e??? +r???lM[sz?:t5>h!?}?????=ZI,??GC.???LrGR-L?A??N?T??xy???*?m? ????)?An1??"??Dy??????nQ?`?mp??Z ?j? jK??]^??;]?as???F?G>[sN?P?????SD??G?J??4ns????MmT![??C?@Y?BF??e?5|?s????c????)?tF??PFD?Y^?????.????? ?}?[?+?Es?u?E?9???????saR?`?E"v? ??g??%?h8l??B?7?#?!?g?,Rp?#x?X ]???~?'N`???H]?{?x:x????????? +?5e????8D2Ou?|??:?f??????U?I???H??f?t???Q??????E??7????????U+??M?/'?]?\?UU=y???=]?v??7????#??x??n5r???$??)H??????I??E???G??o??@?? Z???????^`A0#!AOa?4P????/k?S0Z&/?????*D??T??]???f?F6+??"??H???O??M0`.???k?????Z????{;;?{ }~??????:z?:n??\?r???ntz?vo??F?J?,?*m??ql????17??*?? {???K??q ?qu??l?%:??uJ65:??,{D?H??j????`OA?? ?O???o???????o??=?4KDk??m???k???>???ur?n]$?0^?=?,?m?P???bJ?????T???????P?I?#?k*???.?k?8????A/???p??Cg??3????M???I a? s|A ???\Ni?HJ?EK?$J???????"? $?tf?? -???x??+??p??><`Y6Dz?*s^?Y????6??i?8\??/???w???#??e??,?%em???GF?PT#?o,??-? +?D???) +-?I8?LB?rU?>O?x?????*?r????? ?????V???y?H?kS?D?%<[?a??W?\E?I?G????A&d~Z???1??.@? j??Cb?y???ow?1??????L???'??z?????U???.?????H??????nlD??_??\g?????FmN?ex???????d?+)?H??????Ijq?'&????H?ql???u?<\?,??????kj????}??A(???vr8 +?V?!???????p?:U??BYA&o??r/b???I?"?LB??|??sTK?"??,? ?.?4??J?En??av6???"?C?~Px??????2?????????p?Z?8*3?Se%?-?e/??X???4? +*?=?????k/$???8?2[?D?{?bw????? ???3?"?V??|? 
???"U?W???Z?c0?{O?9`????Pu??_ThK??.?6p????&}T"??g???~eI?-e??????y?/?n?????]c~? ??h?u?k???b????? ??7#???V?E"u????N/jVI???l????J??T????[?DkYY)? +??fhrf?w\z?7??.]^??4?\??8RD?pW?R?Un? Q??>+??,???=?i??????x?J???_?n?,????#?? ?????M???2cf~??? +?????)?b}?]? izdy?Is?_???)?s&??B:?xO9?V B?[??mbV|Ri{i???.?-??'K?^??????~????????izU?? +??e???-?@???{*?t??d? + +?[?N??:??8V"?I?????t)?????^?1??KOlB(??dA?Y???5 m ??|????G??d?_??t???Z???OUbw??g4tI?"L:?D?y??T?#?lX|?OqS?&?-?????b????>9C?W???'Y????????(`h ?9?X??,??|??? +endstream +endobj +151 0 obj << +/Type /Font +/Subtype /Type1 +/Encoding 200 0 R +/FirstChar 34 +/LastChar 122 +/Widths 201 0 R +/BaseFont /HYFYYR+CMTT10 +/FontDescriptor 149 0 R +>> endobj +149 0 obj << +/Ascent 611 +/CapHeight 611 +/Descent -222 +/FontName /HYFYYR+CMTT10 +/ItalicAngle 0 +/StemV 69 +/XHeight 431 +/FontBBox [-4 -235 731 800] +/Flags 4 +/CharSet (/quotedbl/percent/quoteright/parenleft/parenright/plus/comma/hyphen/period/zero/one/two/three/four/five/six/seven/eight/nine/colon/equal/E/G/M/N/O/P/R/bracketleft/bracketright/underscore/a/b/c/d/e/f/g/h/i/k/l/m/n/o/p/r/s/t/u/v/w/x/y/z) +/FontFile 150 0 R +>> endobj +201 0 obj +[525 0 0 525 0 525 525 525 0 525 525 525 525 0 525 525 525 525 525 525 525 525 525 525 525 0 0 525 0 0 0 0 0 0 0 525 0 525 0 0 0 0 0 525 525 525 525 0 525 0 0 0 0 0 0 0 0 525 0 525 0 525 0 525 525 525 525 525 525 525 525 525 0 525 525 525 525 525 525 0 525 525 525 525 525 525 525 525 525 ] +endobj +200 0 obj << +/Type /Encoding +/Differences [ 0 /.notdef 34/quotedbl 35/.notdef 37/percent 38/.notdef 39/quoteright/parenleft/parenright 42/.notdef 43/plus/comma/hyphen/period 47/.notdef 48/zero/one/two/three/four/five/six/seven/eight/nine/colon 59/.notdef 61/equal 62/.notdef 69/E 70/.notdef 71/G 72/.notdef 77/M/N/O/P 81/.notdef 82/R 83/.notdef 91/bracketleft 92/.notdef 93/bracketright 94/.notdef 95/underscore 96/.notdef 97/a/b/c/d/e/f/g/h/i 106/.notdef 107/k/l/m/n/o/p 113/.notdef 114/r/s/t/u/v/w/x/y/z 123/.notdef] +>> endobj +142 0 obj << +/Length1 1272 +/Length2 7410 +/Length3 532 +/Length 8210 +/Filter /FlateDecode +>> +stream +x???e\Tm??i?[jH???[Z@ ?f:?????P?????B???????>q????/?????E?d??9*Z???|bb|9w[?r]X\?W?P?9y??m??l +???? ???#@???E?????`V`??7@????? r?x??yp???`+7?%??????SG@?_?????<@.?HS6?Iv??5?? ????`?Z ?????;??;?e?+??]??P0???0????? ??8?w? ???i??????^}?f[?9?B@n1A??`We??Z?fe???????A???m???m?t??5?q??????cvt??vxG??|??#???????????????)9Z??????X??Xx? ?IB_>???y!yan?-dc?60??n*?h?K?y@' ?#d??[?????{??,Z??P?!??r:?? ??? ?????( ??&P?7??J?????oBFj?C?H?:?r?????!?X?&dN??????D??????y??yf??P??????Y??#t??ua ????,??" ????????????H ???oD???^ ???????7??????y?r ????x?r,??????]?????[?h??m??? ?Y? ?aV!???a??J?_?0?]i?KB??Bk?%xv~HO?4??????B?????){???6BKT?r??}???3D?x???z?vL??r??????5???Y??E?6?J?m>???H?????'?#K?4k????f?d??????_?*?B?x??$???A??0?0??.????-]?????fy?/??a???8??????37zK ???D?? 8??dR??nU:+?-F?!???}C2????.???8o???T4???$??N^?$?%??:???????Q??x????lh????Za????{?a3????| C??g??f?;????`.?7???iy???MEM???G??L?a????a?s?????- ??/8?]l??????%?$??n}w5BQL?4e?hA?_Z??{l?'??NFF?%T?~??p??V7?X@?(D?M???^???F]Xc???~???tXN&C??_??:????O]K?&?_?j??o????8r???1 ??Z?,I?,mW?zQL???d?X??A????F?/??4?T???k?V???O??~? +?.????Y???@?]?O}??;.?^??lu?+???y??~=c?6t?????????Y?jm]??&???wt???I?? ???xVB?Ji@?%???0????????????t????=?S????z?L?????~i?Ha?X??????9??:??l?C[^???? ?????hTuL??? ?D???T?r!7??`?O?I{?? +?????\????2`???S?2%7?6?vM?? ??0?hU????548)??????~???????? ??T?nt?????bA`???;U~?^??4??#h????u?Wj?9???;???[?b????C???p????Y?A???)? +V}?????}? 
3~?U??U?.?????A?????J??}??.?q W???J?]???????J??/???o?:?Be?????M????????^??e +t??51j?u?R??f???????c?v?h?;?i??F??jP?h???- ?K?'O|/??f?n???E?????_*?z?i???s??|z???????L?F???? k????k&?????K?o}???-+=y{WH??C?m????????????"?D???Y?? +{???jK6=r? +e?t?m?Y????_?d.t???c}V??)?b?N???zT??????????A ???8z* ??>J???j?N???&%?4"?T?$B?^??,?Fr>???W??G$F?n1??N???l??9??/??#??Z???[????y???1??I? +??QB??????/Dd"????m?~???0???7vbL??I?S?w?Z??D?? +??dI\cSa?L?YE??'????lkU???f?!ZKv??q?)?^3????h?9????Rb?q?n???????3hCQ?I??C7? +?D?0;?? ???yH??}%??&I??N??????1?U?KW?????????P e?j??y??8?I?2?AL???q?K?W??[??"?v??????o>uM???f??/]0c05???{?????????????6U2??u??#???V!g ???z?x??6x?Rn??T|S???G\??upo????/?R@]fi#?0-^??2+hF(*??i&????jCiQ4?*?\ ??U?MP???gL???1^?H??`?"?9??J????$?(?Q=??????hV??+???S 8.o????S?k +?h????y$?vM?!T?????$???bU^??F3?[????gB???D??04{ ?FjM??-???m???H?n???3???QNcsA??@?_?-F??~R:q???`;#|????6????L*.\Q X? ? +????]?R???h?}????v???c????WKs??&??P??L???h??????R????E/?0L??4??G L?~q?|?@)\????7????^V??7????k?e????&5I??D??6??7F??????V)?O?"?g???*???R96:T??O??1 O?8?-? +????? zX???&?s? ?g??????=??o??L????????zv^?????>?7?? _/?=?~.?9????Q\?NB?w??,??&Ua?W-?1*_??^???td??????T +h?T??->?P??p?O????kK}'~?C????w???m"??x???)?z?g?.?3?W0????]U????C{? -??R??p?Em%??>?:??t?;.K??\(????r?7Y&??J???xO?????+??F??????#7???8???f&$??%?\?????$V?Hg+??)?B???v??V2????K??'!Qo)ZP^???C??s????9??y?r??|-.S1?z?8??d?nH? ???L????im??|0IU?~."?zY?Nv!????v?9k?t&$?!J??^+??P|m????a??(;0????A??c?h???j ?|??c?C??B>?6?Dg? ? ??m0~_???????8N?>??l????(*??9x??????0+??s?{v??|Ed??n?C???sQ????>w???y0???? ?6?wo86g*??:???A????/qqO ???????J???\?v?]??/3K?De~??3v??3$??8??O|??6?W1\?? +Q ??T?_?z$A?`2???Zk??Il?U?Sl?c??E??PVd??]????? +??8S?.?t?3???a??b?+??W?`? +????{?F?? ???%{??j????2??ezB? ??o??piO ??? ?????O??????mt?A?????U??r&??e???- +??/O??? !u??????PX?UJ?Y????<["?"???K??????????V??? j??????,YK +endstream +endobj +143 0 obj << +/Type /Font +/Subtype /Type1 +/Encoding 202 0 R +/FirstChar 12 +/LastChar 122 +/Widths 203 0 R +/BaseFont /SPOKLQ+CMSL10 +/FontDescriptor 141 0 R +>> endobj +141 0 obj << +/Ascent 694 +/CapHeight 683 +/Descent -194 +/FontName /SPOKLQ+CMSL10 +/ItalicAngle -9.46 +/StemV 79 +/XHeight 431 +/FontBBox [-62 -250 1123 750] +/Flags 4 +/CharSet (/fi/parenleft/parenright/comma/colon/question/B/C/E/H/M/P/a/b/c/d/e/f/g/h/i/l/m/n/o/p/r/s/t/u/v/w/x/y/z) +/FontFile 142 0 R +>> endobj +203 0 obj +[556 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 389 389 0 0 278 0 0 0 0 0 0 0 0 0 0 0 0 0 278 0 0 0 0 472 0 0 708 722 0 681 0 0 750 0 0 0 0 917 0 0 681 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 500 556 444 556 444 306 500 556 278 0 0 278 833 556 500 556 0 392 394 389 556 528 722 528 528 444 ] +endobj +202 0 obj << +/Type /Encoding +/Differences [ 0 /.notdef 12/fi 13/.notdef 40/parenleft/parenright 42/.notdef 44/comma 45/.notdef 58/colon 59/.notdef 63/question 64/.notdef 66/B/C 68/.notdef 69/E 70/.notdef 72/H 73/.notdef 77/M 78/.notdef 80/P 81/.notdef 97/a/b/c/d/e/f/g/h/i 106/.notdef 108/l/m/n/o/p 113/.notdef 114/r/s/t/u/v/w/x/y/z 123/.notdef] +>> endobj +204 0 obj << +/Length 172 +/Filter /FlateDecode +>> +stream +x?u?? +?0?[??? }_`R??S?V0????8????s?i??~?c??%[?? ??-?3???%;???5????Y?#?;??Q?d?Q????}#S????)???=?/I`" ?K T?!?b$?c,"*?RGr??TC??D??}$mJ:>?? ?u???4R]FO??D? 
+endstream +endobj +140 0 obj << +/Type /Font +/Subtype /Type3 +/Name /F34 +/FontMatrix [0.011 0 0 0.011 0 0] +/FontBBox [ 9 6 44 40 ] +/Resources << /ProcSet [ /PDF /ImageB ] >> +/FirstChar 136 +/LastChar 136 +/Widths 205 0 R +/Encoding 206 0 R +/CharProcs 207 0 R +>> endobj +205 0 obj +[45.2 ] +endobj +206 0 obj << +/Type /Encoding +/Differences [136/a136] +>> endobj +207 0 obj << +/a136 204 0 R +>> endobj +79 0 obj << +/Length1 1788 +/Length2 12852 +/Length3 532 +/Length 13861 +/Filter /FlateDecode +>> +stream +x???eT????Mp .A ww??????p????.? Np Np??????g'g???????5???|???\P?*?2 +?9?%?]Y?X???*?,V&DJJQ?????^???`??eHM?_?o>N>N6DJ????'????@#J?O7@??25???X??9L?m??V@O&???-@??'?*@g ? h???? +0?2u?-?????$mo???/???????? g?)??l??&??m=f at sDfp5 ?????3???????????i??1llge????]]? ???d???????&4?r???Qic[+Sa{ [ ???$+g +??????%?????/ho??&????feY))??Z??)[???y:?;????b?? ?????n/+8?????????????????? ` {"?w?8??+{3??6??d??~??/???????????_?!?? ?????? Nfkl?G?0;A?@{??/??????u??ep?1ho 4?Ke????K?:???Q8?????@?????[9????~?m??-?(<f/ ?????`?7s?}????;r?????-?????}r?+l??d?f?Yg??_^??? ?S?[??????s?u??!???Y?O? ?\????!pR???(??????M??iK?!??%?x~?\A??+??!p?????o?WP?C?&??!p??n???O??sj??????=? ?n?????6/ ???_?!?/?gM?B???l??/;??X??b??=???`S?!????? +????B?+?????/???????l??/?p? ?6??B? ??:???_????mx? ???MD????? |J?,?????????"M]A?#???_? ???? +|??@S?????????C???MU????????}?)x1??m????z??ce&??=?=?g???q??@?????=7??L?V??Ly?]G???c??????w?3??_???r.? ?h????VI!zt?{?>sqkId?????S?????;} ?[sGKN?^? ???o?_?t?p?x,??Oz????C7? aW???R?<)/CC?{???X??{0??*#?????59^????????n? ?Z?/?\???;??????2?%,?E_c???^??????I??>?I.1z?:0?(????:?YSX^????????????]?2??}K?????Eh??M?P??*???6????????c?.7Eb=?j6?t?f?U???g?? }?)1???M??T??T?? ?w?=>Op?Hc ?"??"{??:??U???W? ?????H?I???S????k??)?????n}G?N?\eN????~??=^?S??u?R?Q?e?O??X?t?XR>.Z?#????T{%L? Q +??f???/??s??;q????#???q???????K??t????vPTuo?V lC?v?z,?mNE??iw??XZ"?(???g~+J6I?6?SV?g?M????? u?U at d????p????y?E]Y? {?FW?z4????u???5? +??JjO?rJ|r??U?Wwp?????8???????&?0?]?)I\4T0?!???Dfb??e-'f????n(?? +v???q????p???+?2??-???1?d?D???????Yld?!??#????????s?s?lU?K?h?\?{d????C?Y?- ?E?????r?S??__?]??Po]Q?????q??q??5;?d.]*?)ec}????0 5?k????S??e?h??W? ????Yy??2?5?Z??@?1N??? g9??1zg??"?W???~??MH?W2???dJ????O`?G?7?w{|vq*?????x?9V6????W?C.?? ?????z??o???????4??????3Gj??A?}?????aAn???3X??J??sd??S??kx??3?t?9?0F?HS9r??%???pd??????$;5?!?0??M??m???????O?b?#:+???3y?????,?+?T???{~?7?G????z:I????\*z?J??4?/z'=n?o??&?}?????????k????????FO?k??}p^??7? ?H?Wf?$Ee?5?:` ???y?????D?#?9????? +??h?????Z?!T\5?u?;>.#W}H??m?????l?m??z*?=\? +??hBm>z_ %??+*?y&?P???T?,?9F?bx1a?_??(5???-?u ?s?}~?EM??ydL,??Ap?U???q @???6?-????F)p??????IE?BdC?+??$/#DL????1???ZV*?"??]??O]?5?????????/+????F?|t???? q%f??B[?S??55hC???????k?>?wnt?:]c?l??3?f???45???0@??n?&?m?x?X?? r?F?wg?????T??C.?]???9yzX??B??nY??%2BT?'S?c?Y??k??t??0??wM?(l$FO?????????h$??d?sC??g??d????JA???.??~??????????6?Kb@}OR????E86F?2?%3?%?9AZ>:?X>^Dc@?BpkN?m?????5V?|?=?????????u???H??]7]pR???m,?9??H?|C?h???ya?TW?)?t????1??{ ???d??~4??"?F"??>?zV?????Q?? +????J?D8???"??O???$???8#????????>8???L?h??Zr?c???s?? mr?>?q?Gri??fw^sX? ?????TB ?????X?p?xM??)?}a?c?i?y%l?v???X=???$t??Y }? +???? -?4??r????^k??*?-?V???????A?^?tx ?? R  ?e?\?&?m?E?]?????p=}???o???_??? ???M??x?????2V??????~]???o7?Rk;"????????Ql??m>???<p??01]????~G????????|??s/Z9?{A?Be5O?dD!??])?Ct-khv?? ?Hu??0"#&?????J;?0?kAV3???????"????f,G?Jc???:??)???a_?b???-?Y?r??? ???rU?bW,&pUv????*Dt5??Zv?!?I?yQ????z"?p???>??,4?????xt@?d?IB?9+?????%p?4?!?H?? I^??]9?????]??h???G???>?7?c??=?P????}Z?J??t Q????$?? +n?]?S&:??1? ?t???X?&#?id?'??x??%?k?&?? 
a?M?p??G5?6??:=?t[t$E2tE???FAB?o^?????%G6??(???9?G??p??(zD_B?,L??p??????/????$?*W??'#??????A??&?e?l-?????????cH+ Y?????Q???=::?????(? is??1??dTZS$??#?D?v????MR?????????M|???Oo??LP??5??5?Oh,['?v o(???????V???A?z??\??N7EV??ZP???hk?AW?J?rd?5?y?\???v?"?XgW1????????r1??C?6m#??~?????JD?m*%?>????K?1?'ET>@&??????+??~??[3??@K???/9??{1?S?( "?q?a???wR????i?as????? ?????????Q?U????Y?""??4=?,?)|dM??:????e(???????}??s???#?Wy????hy%?%5?D???5?A????}o-???k1p??????jc?V?C +M?V??>*:?4?[?&?T?????? *?o"???RR???n???Uh????#Y?iRC1?~??, +"?l7_{?? +???g?3?u???h??:?"UB?U1mZpQ?'?2TZ?H?????e^(?S??y??}lP???i?!??r??;FX?????????b??? ?t?( ?/y?_??8?k:??:k????3??n?????{??????%C?>?@?!?X??"? ? o????.???\??[?#-??s????!????QW????????????HH?7??&?????????????}?R?M????h>????)? ?M|??????6??I3?#????4????h?P???`??????#@0???????n;D?jY? V?=??0???$?????I*Z?=???Qx??'?f?/%/~`\?~2v\??_ +e?b?hS Kb?????)5I????>$????$???w??y??.l]????0?3??V,???P??;?W?`?$?Q??%??*?e?\?h?r?]1???? ????P?????NbY???#O?|??)???????x?F???=???xn,?]9?? ??3d?? ? ? ???????_dr?!P????^Y?h}???*??.?????L?(C;?D.??n? m?Zo???>?ji?yb??t???Jp???+? T?O????????[???xXho?q?,%??? +7f???e??5Bl?=??X?R\&?Z]?Y??fO??:e??[???????????|?q:w??jP????????B??~L?PT?j?w?O\r?F???k??~????o?+????????87?7?q????a(?(????l??????u??U?a??????? ?X>R[???/Z???_5?dhS?{ ??D?Y_??h?????'?l?oh?l??x??t?u???'?I????F?tS?????\??F5??????z?]?s{Y'x[?m?? ?!?v??}?5qK?E=?Ex??"???l}r w:??f1??x????/Mx0?{?0g3??F&(???????O??? +?]?| ?S???H?>W??D{???^?/?g?I???#??7 +/?fO??C"????K?? ? ?|:??tz??????`?*?9?:^.??1????`?? +?~\?????NQ????O??????*JiD?lPy?:WjW?t)?e??T????i? ?UG??*?K??? x^????z?m?1??;??w??????\d??n^???M?!?-?sc?HZ?c?K?????lbO?6?,U'.??P??????x?:W??Cv?!H|???~ W??O?}???V9T???^?^?z!??m0fL???????G?[:?t??Q;???T?? 1G???r7?Lr?O?d1J?i???)Z?}??wd?Z??@ }????Z`??&??!????%?-o?|?B?O???[?a[?`U????2???U|?!???@y???H???[??Xf~??Q+[m?;zji??h????? +??4p??2?Y??c????????^?=?8}?????'? `???y?M???$u?? Eq=aq??l&???u??)?;g@? ?~9?F?8???????Q@??6?Z?e?F?%s?1O???:?B??????;?w????;?????INt?? +?5??0?????^?????CIBy???5y??q????????{??^?8pN?7??P?? K???'C|???d$??i?????w?I???? +?????????Iv; +?????#???r4??(j)R?:?O??a?2R????1?IoM?v??9 ? +VXu? ?p?2/??+]a?'???N?=2*KB5?}?W3F??t??????$0\zLD.=?8 ??T???cD???)?L?j'?8o????\??U?Qy~?H ?=Y7?k??b?a?&?Q??BS@,@???WPE?jI?U~?? ??????!=?g?h?_?>D\????F(?A????_?m?e?-???_{?d?Pq???ka?0???-????lN?P69PJ? ???'?T,??d]i^?/j??x???E?QQ?g?s??d??????l??Ru????????????S?"????????@?L???^?x~???L???ib??zWd???N???6?? ?a??cG?? ?j????? PK??x??n?z??c???U??j??-?CC??)??c????~? ????????$?? +?>??|?-}??n??z??l?Q?TfK????? ???cC?W??"?!??z???`?geA5 A???????Z?J????X?"?/?????=?c????e?????=?pI??? 3M?'?? ??"WiP??????5.???????ml/A?+6????????????_?PhC?*\R?%;X??$%%?$?I?~'o?????P?F??xv?9:?A????????= `???? ?5?>,?,)??(?_???{w??O???S??d??9m.w?.?i?6??_?;??L?UR??=S???M?O,w2????;Ew????????bJ?m???n]??{P??3zC?BK???~?g??_?(????%o?0??=I??fi??}i?H?1?d???????5 ]???????sa?????G?OV{?N?:\?_,???pZQ???BA??9x?,?7#?j???????s? H+??s????B?h?bo???$8??[??_?&TH?n????(?N?L??be!?T??????@??dFQ?"???Q??J2 g??? j?~??=?u?y?\g?'?/?Fbq4?5??ME?D??????????g????y??Io?$???????!>> ???????u??c7??????????%????I??????L?Wl???N?k????$??D?(??a8W??<;($?{?T???0_U???R?????z ??l????!2Q??????|F?u???y???????k&???t)?RD? ?Qt5n?????m??.t??e2E?d???Y??zv??(??t?FxC?{O??? 9?/v??? +.a???wX?P??1??+nZ?sJ??I??t?H?T?ra???:_??z?qr???z?RV~?x?\??????V?YW?qr4?7??}s????T~?_??j??{zvo??-R(H6q?????????A?L)?&?q)??OS??v}H???)???x;? ??9{ 8?????????x?s^??????t? 
'W?G?2????Nnv~?|???|?R?)r????Kh ?????/???f???????\?????a??q??zP??D???`?b??]?B?????>e?/???'?{?\?_???s?Y#3?k?_?%C??????%_:.R!????{Bzf?n??Lq??????Mk??SS ??Z=??D?b??k?rH???E{0DK???S?1??6?h???>T?r?&????0/Dg7}:??? ??q#3 1??C|??PB?v?? +???????Z??{. ??'??J{FC??;?yF????A?V.??0h?1??n?f_6????zG??9???Dm8u??Tf?????????u?=S???8?dq???P%/?N?q.????????]??????h?Zx???;n?n????? p?5??E??w???U??H?~8!?04???@6?4U??;?y?7?o i?,???l???????)M?Gh!W???8t??[I>L?e?????@L k?Uf???@A?g+?l?U^???)W]#??Hvc??/?@??'/??U??????????8?w[?*??x?p?????q??~?^%?????p2y?nh|)?;'5?i???.???_??{ju1??6???yw??l?M???w???FL??i??'???)ukVL??]?X??g????W?UW?4C|'%v?j??'Y??xl???&#???d:A?~?6\??????? ?o?????g?m{??[@??/??;?????s??0?????X?E?????)?=@)-=??y?j???????X6k1)U?? ?f????!??????~??U???L+z??O???W@?$y????Rg???_??GHx????;_OB]?{????O??#?????????]1t??????i?b?a?W?.:p?U??\?U????????5T?r??G?&*W?R????QV9?:f??>P?M#?M??8*V?W:??iT??om??p?P4?t??????K??O???AWR?????a??b??F??2????V+???? z????{c_'?M??SM??r??%?Z?C?@v? 2??d%?? ???????C?1:?C??m3%??? o.?9iK=sm;'?ga?Ex:??W???BM???Hn?#?$)7MA???V??Z,?Z???9??.????:?*?Y?????8????(8????Qx????.J&???;?q??1??"??Y????_??+???A.v? ???#h? +endstream +endobj +80 0 obj << +/Type /Font +/Subtype /Type1 +/Encoding 208 0 R +/FirstChar 11 +/LastChar 122 +/Widths 209 0 R +/BaseFont /QKHZHN+CMR10 +/FontDescriptor 78 0 R +>> endobj +78 0 obj << +/Ascent 694 +/CapHeight 683 +/Descent -194 +/FontName /QKHZHN+CMR10 +/ItalicAngle 0 +/StemV 69 +/XHeight 431 +/FontBBox [-251 -250 1009 969] +/Flags 4 +/CharSet (/ff/fi/ffi/exclam/percent/quoteright/parenleft/parenright/comma/hyphen/period/slash/zero/one/two/three/four/five/six/seven/eight/nine/colon/semicolon/A/B/C/D/E/F/G/I/L/M/N/O/P/R/S/T/U/Y/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p/r/s/t/u/v/w/x/y/z) +/FontFile 79 0 R +>> endobj +209 0 obj +[583 556 0 833 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 278 0 0 0 833 0 278 389 389 0 0 278 333 278 500 500 500 500 500 500 500 500 500 500 500 278 278 0 0 0 0 0 750 708 722 764 681 653 785 0 361 0 0 625 917 750 778 681 0 736 556 722 750 0 0 0 750 0 0 0 0 0 0 0 500 556 444 556 444 306 500 556 278 306 528 278 833 556 500 556 0 392 394 389 556 528 722 528 528 444 ] +endobj +208 0 obj << +/Type /Encoding +/Differences [ 0 /.notdef 11/ff/fi 13/.notdef 14/ffi 15/.notdef 33/exclam 34/.notdef 37/percent 38/.notdef 39/quoteright/parenleft/parenright 42/.notdef 44/comma/hyphen/period/slash/zero/one/two/three/four/five/six/seven/eight/nine/colon/semicolon 60/.notdef 65/A/B/C/D/E/F/G 72/.notdef 73/I 74/.notdef 76/L/M/N/O/P 81/.notdef 82/R/S/T/U 86/.notdef 89/Y 90/.notdef 97/a/b/c/d/e/f/g/h/i/j/k/l/m/n/o/p 113/.notdef 114/r/s/t/u/v/w/x/y/z 123/.notdef] +>> endobj +70 0 obj << +/Length1 1310 +/Length2 6334 +/Length3 532 +/Length 7121 +/Filter /FlateDecode +>> +stream +x???e\Tm???F???Cdf?S????????K@??PI ??Q?;??3????[????? +?Oa?z?PP????w??%?????0??x,~Y|??> ?X??5?2?_8_?~V???w?}?:???7jCtr?]T??$????L3???P????G???????????|?????q{?? g^o??O??p??m?? VI2??U ?????Y?c??Iq?Cp?N?eu(v??S???????J????z?f?? u????QB*f?n??-???&??I?M????j ?Y?????)A?????Q????Yy?/???5?w?h??p?V??X ??7?,]????????=|????E?4?\O??????.??#?A????C?{F.XE?fwW] m??$?Q??Z?a?!???(??? %???A???GS??nOw[?\(CN???{$?Y? +???Z??Q:y???3?8F??? +f??a1?????^??;4???}????L?+m&??4Q& ?P????Y?*???D?/?u??&????}x&`?????]??????!s?xD??(?l?Y???x??u[k??{5??q Xa?t0b\qk?UJ??X6??0?A(? }&t??s{Q??i?????r ???][~??I??iv?Q^????? +???????OqF???mj?B8f?.&?LRt???????vl??MN???"0???Fy?&C??mH????W????'ws%rP???U?z*??k?N?7??0 &>m? .?m? +o??iY????? RZ? ?_?;X??.Z?%??c??)?hp'????'}?Y(???C#?????? ?H P?T.[;??{}uJX?_?V??$???C_P??&G?|?&???P}~?Ex??th Q??U@?????K??_???? 
Added: trunk/Lib/sandbox/pyem/doc/user.tex =================================================================== --- trunk/Lib/sandbox/pyem/doc/user.tex 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/doc/user.tex 2007-05-28 01:32:34 UTC (rev 3051) @@ -0,0 +1,64 @@ +% Last Change: Wed Jan 31 08:00 PM 2007 J +% vim:syntax=tex + +\newcommand\at{@} +\newcommand\lb{[} +\newcommand\rb{]} +\newcommand\Cba[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\Caz[1]{\textcolor[rgb]{0.00,0.25,0.82}{#1}} +\newcommand\Cay[1]{\textcolor[rgb]{0.67,0.13,1.00}{#1}} +\newcommand\Cax[1]{\textcolor[rgb]{0.00,0.63,0.00}{#1}} +\newcommand\Cbc[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\Cas[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\Car[1]{\textcolor[rgb]{0.72,0.53,0.04}{#1}} +\newcommand\Caq[1]{\textcolor[rgb]{0.73,0.27,0.27}{\textit{#1}}} +\newcommand\Cap[1]{\textcolor[rgb]{0.72,0.53,0.04}{#1}} +\newcommand\Caw[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\Cav[1]{\textcolor[rgb]{0.60,0.60,0.60}{\textbf{#1}}} +\newcommand\Cau[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\Cat[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\Cak[1]{\textbf{#1}} +\newcommand\Caj[1]{\textcolor[rgb]{0.73,0.40,0.53}{#1}} +\newcommand\Cai[1]{\textcolor[rgb]{0.72,0.53,0.04}{#1}} +\newcommand\Cah[1]{\textcolor[rgb]{0.63,0.63,0.00}{#1}} +\newcommand\Cao[1]{\textcolor[rgb]{0.53,0.00,0.00}{#1}} +\newcommand\Can[1]{\textcolor[rgb]{0.00,0.50,0.00}{#1}}
+\newcommand\Cam[1]{\textcolor[rgb]{0.73,0.40,0.13}{\textbf{#1}}} +\newcommand\Cal[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\Cac[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\Cab[1]{\textit{#1}} +\newcommand\Caa[1]{\textcolor[rgb]{0.50,0.50,0.50}{#1}} +\newcommand\Cag[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\Caf[1]{\textcolor[rgb]{0.00,0.53,0.00}{\textit{#1}}} +\newcommand\Cae[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\Cad[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\Cbb[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\CaZ[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\CaY[1]{\textcolor[rgb]{0.00,0.00,0.50}{\textbf{#1}}} +\newcommand\CaX[1]{\textcolor[rgb]{0.00,0.50,0.00}{\textbf{#1}}} +\newcommand\Cbd[1]{\textcolor[rgb]{0.73,0.40,0.53}{\textbf{#1}}} +\newcommand\Cbe[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\CaS[1]{\textcolor[rgb]{0.50,0.00,0.50}{\textbf{#1}}} +\newcommand\CaR[1]{\textcolor[rgb]{0.00,0.53,0.00}{\textit{#1}}} +\newcommand\CaQ[1]{\textcolor[rgb]{0.72,0.53,0.04}{#1}} +\newcommand\CaP[1]{\textcolor[rgb]{0.40,0.40,0.40}{#1}} +\newcommand\CaW[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\CaV[1]{\textcolor[rgb]{0.67,0.13,1.00}{#1}} +\newcommand\CaU[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\CaT[1]{\textcolor[rgb]{0.00,0.00,1.00}{\textbf{#1}}} +\newcommand\CaK[1]{\textcolor[rgb]{0.67,0.13,1.00}{#1}} +\newcommand\CaJ[1]{\textcolor[rgb]{0.00,0.63,0.00}{#1}} +\newcommand\CaI[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\CaH[1]{\textcolor[rgb]{0.67,0.13,1.00}{\textbf{#1}}} +\newcommand\CaO[1]{\textcolor[rgb]{0.73,0.27,0.27}{#1}} +\newcommand\CaN[1]{\textcolor[rgb]{0.00,0.00,0.50}{\textbf{#1}}} +\newcommand\CaM[1]{\textcolor[rgb]{0.00,0.00,1.00}{#1}} +\newcommand\CaL[1]{\textcolor[rgb]{0.00,0.53,0.00}{#1}} +\newcommand\CaC[1]{\textcolor[rgb]{0.00,0.53,0.00}{\textit{#1}}} +\newcommand\CaB[1]{\textcolor[rgb]{0.82,0.25,0.23}{\textbf{#1}}} +\newcommand\CaA[1]{\textcolor[rgb]{0.67,0.13,1.00}{#1}} +\newcommand\CaG[1]{\fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{#1}} +\newcommand\CaF[1]{\textcolor[rgb]{0.72,0.53,0.04}{#1}} +\newcommand\CaE[1]{\textcolor[rgb]{1.00,0.00,0.00}{#1}} +\newcommand\CaD[1]{\textcolor[rgb]{0.63,0.00,0.00}{#1}} + Deleted: trunk/Lib/sandbox/pyem/examples.py =================================================================== --- trunk/Lib/sandbox/pyem/examples.py 2007-05-27 10:58:24 UTC (rev 3050) +++ trunk/Lib/sandbox/pyem/examples.py 2007-05-28 01:32:34 UTC (rev 3051) @@ -1,14 +0,0 @@ -def ex1(): - import basic_example1 - -def ex2(): - import basic_example2 - -def ex3(): - import basic_example3 - -if __name__ == '__main__': - ex1() - ex2() - ex3() - From scipy-svn at scipy.org Sun May 27 21:44:16 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Sun, 27 May 2007 20:44:16 -0500 (CDT) Subject: [Scipy-svn] r3052 - trunk/Lib/sandbox/pyem/doc Message-ID: <20070528014416.2499939C074@new.scipy.org> Author: cdavid Date: 2007-05-27 20:44:11 -0500 (Sun, 27 May 2007) New Revision: 3052 Modified: trunk/Lib/sandbox/pyem/doc/tutorial.pdf Log: Set pdf tutorial svn properties so that it is correctly handled by services such as inline diff in emails Property changes on: trunk/Lib/sandbox/pyem/doc/tutorial.pdf ___________________________________________________________________ Name: svn:mime-type + application/pdf From scipy-svn at scipy.org Mon May 28 03:15:19 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 28 May 2007 02:15:19 -0500 (CDT) 
Subject: [Scipy-svn] r3053 - in trunk/Lib/sandbox/pyem: . doc/examples examples src tests Message-ID: <20070528071519.EF3CC39C073@new.scipy.org> Author: cdavid Date: 2007-05-28 02:14:50 -0500 (Mon, 28 May 2007) New Revision: 3053 Added: trunk/Lib/sandbox/pyem/doc/examples/demo1.py trunk/Lib/sandbox/pyem/doc/examples/demo2.py trunk/Lib/sandbox/pyem/examples/ trunk/Lib/sandbox/pyem/examples/__init__.py trunk/Lib/sandbox/pyem/examples/basic_example1.py trunk/Lib/sandbox/pyem/examples/basic_example2.py trunk/Lib/sandbox/pyem/examples/basic_example3.py trunk/Lib/sandbox/pyem/examples/examples.py Removed: trunk/Lib/sandbox/pyem/demo1.py trunk/Lib/sandbox/pyem/demo2.py trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py trunk/Lib/sandbox/pyem/doc/examples/examples.py Modified: trunk/Lib/sandbox/pyem/Changelog trunk/Lib/sandbox/pyem/TODO trunk/Lib/sandbox/pyem/__init__.py trunk/Lib/sandbox/pyem/info.py trunk/Lib/sandbox/pyem/src/pure_den.c trunk/Lib/sandbox/pyem/tests/test_densities.py trunk/Lib/sandbox/pyem/tests/test_examples.py trunk/Lib/sandbox/pyem/tests/test_online_em.py Log: Put examples directory at the top level, so that they be easily imported for tests. Modified: trunk/Lib/sandbox/pyem/Changelog =================================================================== --- trunk/Lib/sandbox/pyem/Changelog 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/Changelog 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,3 +1,7 @@ +pyem (0.5.7dev) Mon, 28 May 2007 11:31:08 +0900 + + * Put doc into its own directory + pyem (0.5.6) Thu, 16 Nov 2006 21:02:02 +0900 * correct examples Modified: trunk/Lib/sandbox/pyem/TODO =================================================================== --- trunk/Lib/sandbox/pyem/TODO 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/TODO 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,8 +1,11 @@ -# Last Change: Thu Nov 09 06:00 PM 2006 J +# Last Change: Mon May 28 11:00 AM 2007 J + Things which must be implemented for a 1.0 version (in importante order) - A classifier - basic regularization + - Use scipy.cluster kmeans instead of our own, as it now provides all + necessary functionalities. Things which would be nice (after 1.0 version): - Bayes prior (hard, suppose MCMC) Modified: trunk/Lib/sandbox/pyem/__init__.py =================================================================== --- trunk/Lib/sandbox/pyem/__init__.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/__init__.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,12 +1,12 @@ #! /usr/bin/env python -# Last Change: Thu Nov 16 09:00 PM 2006 J +# Last Change: Mon May 28 01:00 PM 2007 J from info import __doc__ from gauss_mix import GmParamError, GM from gmm_em import GmmParamError, GMM, EM -from online_em import OnGMM as _OnGMM -import examples as _examples +#from online_em import OnGMM as _OnGMM +#import examples as _examples __all__ = filter(lambda s:not s.startswith('_'),dir()) Deleted: trunk/Lib/sandbox/pyem/demo1.py =================================================================== --- trunk/Lib/sandbox/pyem/demo1.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/demo1.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,109 +0,0 @@ -#! /usr/bin/env python - -# Example of use of pyem toolbox. Feel free to change parameters -# such as dimension, number of components, mode of covariance. 
-# -# You can also try less trivial things such as adding outliers, sampling -# a mixture with full covariance and estimating it with a mixture with diagonal -# gaussians (replace the mode of the learned model lgm) -# -# Later, I hope to add functions for number of component estimation using eg BIC - -import numpy as N -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -seed(1) -#+++++++++++++++++++++++++++++ -# Meta parameters of the model -# - k: Number of components -# - d: dimension of each Gaussian -# - mode: Mode of covariance matrix: full or diag (string) -# - nframes: number of frames (frame = one data point = one -# row of d elements) -k = 2 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.5) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -# Init the model -lgm = GM(d, k, mode) -gmm = GMM(lgm, 'kmean') -gmm.init(data) - -# Keep a copy for drawing later -gm0 = copy.copy(lgm) - -# The actual EM, with likelihood computation. The threshold -# is compared to the (linearly appromixated) derivative of the likelihood -em = EM() -like = em.train(data, gmm, maxiter = 30, thresh = 1e-8) - -#+++++++++++++++ -# Draw the model -#+++++++++++++++ -import pylab as P -P.subplot(2, 1, 1) - -# Level is the confidence level for confidence ellipsoids: 1.0 means that -# all points will be (almost surely) inside the ellipsoid -level = 0.8 -if not d == 1: - P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_') - - # h keeps the handles of the plot, so that you can modify - # its parameters like label or color - h = gm.plot(level = level) - [i.set_color('g') for i in h] - h[0].set_label('true confidence ellipsoides') - - # Initial confidence ellipses as found by kmean - h = gm0.plot(level = level) - [i.set_color('k') for i in h] - h[0].set_label('kmean confidence ellipsoides') - - # Values found by EM - h = lgm.plot(level = level) - [i.set_color('r') for i in h] - h[0].set_label('EM confidence ellipsoides') - - P.legend(loc = 0) -else: - # The 1d plotting function is quite elaborate: the confidence - # interval are represented by filled areas, the pdf of the mixture and - # the pdf of each component is drawn (optional) - h = gm.plot1d(level = level) - [i.set_color('g') for i in h['pdf']] - h['pdf'][0].set_label('true pdf') - - h0 = gm0.plot1d(level = level) - [i.set_color('k') for i in h0['pdf']] - h0['pdf'][0].set_label('initial pdf') - - hl = lgm.plot1d(fill = 1, level = level) - [i.set_color('r') for i in hl['pdf']] - hl['pdf'][0].set_label('pdf found by EM') - - P.legend(loc = 0) - -P.subplot(2, 1, 2) -P.plot(like) -P.title('log likelihood') - -P.show() -# P.save('2d diag.png') Deleted: trunk/Lib/sandbox/pyem/demo2.py =================================================================== --- trunk/Lib/sandbox/pyem/demo2.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/demo2.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,104 +0,0 @@ -#! /usr/bin/env python - -# Example of use of pyem toolbox. Feel free to change parameters -# such as dimension, number of components, mode of covariance. 
-# -# You can also try less trivial things such as adding outliers, sampling -# a mixture with full covariance and estimating it with a mixture with diagonal -# gaussians (replace the mode of the learned model lgm) -# -# Later, I hope to add functions for number of component estimation using eg BIC - -import numpy as N -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -seed(2) -#+++++++++++++++++++++++++++++ -# Meta parameters of the model -# - k: Number of components -# - d: dimension of each Gaussian -# - mode: Mode of covariance matrix: full or diag (string) -# - nframes: number of frames (frame = one data point = one -# row of d elements) -k = 4 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GMM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.0) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -lgm = [] -kmax = 6 -bics = N.zeros(kmax) -for i in range(kmax): - # Init the model with an empty Gaussian Mixture, and create a Gaussian - # Mixture Model from it - lgm.append(GM(d, i+1, mode)) - gmm = GMM(lgm[i], 'kmean') - - # The actual EM, with likelihood computation. The threshold - # is compared to the (linearly appromixated) derivative of the likelihood - em = EM() - em.train(data, gmm, maxiter = 30, thresh = 1e-10) - bics[i] = gmm.bic(data) - -print "Original model has %d clusters, bics says %d" % (k, N.argmax(bics)+1) - -#+++++++++++++++ -# Draw the model -#+++++++++++++++ -import pylab as P -P.subplot(3, 2, 1) - -for k in range(kmax): - P.subplot(3, 2, k+1) - # Level is the confidence level for confidence ellipsoids: 1.0 means that - # all points will be (almost surely) inside the ellipsoid - level = 0.8 - if not d == 1: - P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_') - - # h keeps the handles of the plot, so that you can modify - # its parameters like label or color - h = lgm[k].plot(level = level) - [i.set_color('r') for i in h] - h[0].set_label('EM confidence ellipsoides') - - h = gm.plot(level = level) - [i.set_color('g') for i in h] - h[0].set_label('Real confidence ellipsoides') - else: - # The 1d plotting function is quite elaborate: the confidence - # interval are represented by filled areas, the pdf of the mixture and - # the pdf of each component is drawn (optional) - h = gm.plot1d(level = level) - [i.set_color('g') for i in h['pdf']] - h['pdf'][0].set_label('true pdf') - - h0 = gm0.plot1d(level = level) - [i.set_color('k') for i in h0['pdf']] - h0['pdf'][0].set_label('initial pdf') - - hl = lgm.plot1d(fill = 1, level = level) - [i.set_color('r') for i in hl['pdf']] - hl['pdf'][0].set_label('pdf found by EM') - - P.legend(loc = 0) - -P.legend(loc = 0) -P.show() -# P.save('2d diag.png') Deleted: trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,48 +0,0 @@ -import numpy as N -import pylab as P -from scipy.sandbox.pyem import GM - -#------------------------------ -# Hyper parameters: -# - K: number of clusters -# - d: dimension -k = 3 -d = 2 - -#------------------------------------------------------- -# 
Values for weights, mean and (diagonal) variances -# - the weights are an array of rank 1 -# - mean is expected to be rank 2 with one row for one component -# - variances are also expteced to be rank 2. For diagonal, one row -# is one diagonal, for full, the first d rows are the first variance, -# etc... In this case, the variance matrix should be k*d rows and d -# colums -w = N.array([0.2, 0.45, 0.35]) -mu = N.array([[4.1, 3], [1, 5], [-2, -3]]) -va = N.array([[1, 1.5], [3, 4], [2, 3.5]]) - -#----------------------------------------- -# First method: directly from parameters: -# Both methods are equivalents. -gm = GM.fromvalues(w, mu, va) - -#------------------------------------- -# Second method to build a GM instance: -gm = GM(d, k, mode = 'diag') -# The set_params checks that w, mu, and va corresponds to k, d and m -gm.set_param(w, mu, va) - -# Once set_params is called, both methods are equivalent. The 2d -# method is useful when using a GM object for learning (where -# the learner class will set the params), whereas the first one -# is useful when there is a need to quickly sample a model -# from existing values, without a need to give the hyper parameters - -# Create a Gaussian Mixture from the parameters, and sample -# 1000 items from it (one row = one 2 dimension sample) -data = gm.sample(1000) - -# Plot the samples -P.plot(data[:, 0], data[:, 1], '.') -# Plot the ellipsoids of confidence with a level a 75 % -gm.plot(level = 0.75) Deleted: trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,45 +0,0 @@ -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -# To reproduce results, fix the random seed -seed(1) - -#+++++++++++++++++++++++++++++ -# Meta parameters of the model -# - k: Number of components -# - d: dimension of each Gaussian -# - mode: Mode of covariance matrix: full or diag (string) -# - nframes: number of frames (frame = one data point = one -# row of d elements) -k = 2 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.5) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -# Create a Model from a Gaussian mixture with kmean initialization -lgm = GM(d, k, mode) -gmm = GMM(lgm, 'kmean') - -# The actual EM, with likelihood computation. The threshold -# is compared to the (linearly appromixated) derivative of the likelihood -em = EM() -like = em.train(data, gmm, maxiter = 30, thresh = 1e-8) - -# The computed parameters are in gmm.gm, which is the same than lgm -# (remember, python does not copy most objects by default). 
You can for example -# plot lgm against gm to compare Deleted: trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,64 +0,0 @@ -import numpy as N -from numpy.random import seed - -from scipy.sandbox.pyem import GM, GMM, EM -import copy - -seed(2) - -k = 4 -d = 2 -mode = 'diag' -nframes = 1e3 - -#+++++++++++++++++++++++++++++++++++++++++++ -# Create an artificial GMM model, samples it -#+++++++++++++++++++++++++++++++++++++++++++ -w, mu, va = GM.gen_param(d, k, mode, spread = 1.0) -gm = GM.fromvalues(w, mu, va) - -# Sample nframes frames from the model -data = gm.sample(nframes) - -#++++++++++++++++++++++++ -# Learn the model with EM -#++++++++++++++++++++++++ - -# List of learned mixtures lgm[i] is a mixture with i+1 components -lgm = [] -kmax = 6 -bics = N.zeros(kmax) -em = EM() -for i in range(kmax): - lgm.append(GM(d, i+1, mode)) - - gmm = GMM(lgm[i], 'kmean') - em.train(data, gmm, maxiter = 30, thresh = 1e-10) - bics[i] = gmm.bic(data) - -print "Original model has %d clusters, bics says %d" % (k, N.argmax(bics)+1) - -#+++++++++++++++ -# Draw the model -#+++++++++++++++ -import pylab as P -P.subplot(3, 2, 1) - -for k in range(kmax): - P.subplot(3, 2, k+1) - level = 0.9 - P.plot(data[:, 0], data[:, 1], '.', label = '_nolegend_') - - # h keeps the handles of the plot, so that you can modify - # its parameters like label or color - h = lgm[k].plot(level = level) - [i.set_color('r') for i in h] - h[0].set_label('EM confidence ellipsoides') - - h = gm.plot(level = level) - [i.set_color('g') for i in h] - h[0].set_label('Real confidence ellipsoides') - -P.legend(loc = 0) -# depending on your configuration, you may have to call P.show() -# to actually display the figure Copied: trunk/Lib/sandbox/pyem/doc/examples/demo1.py (from rev 3021, trunk/Lib/sandbox/pyem/demo1.py) Copied: trunk/Lib/sandbox/pyem/doc/examples/demo2.py (from rev 3021, trunk/Lib/sandbox/pyem/demo2.py) Deleted: trunk/Lib/sandbox/pyem/doc/examples/examples.py =================================================================== --- trunk/Lib/sandbox/pyem/doc/examples/examples.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/doc/examples/examples.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,14 +0,0 @@ -def ex1(): - import basic_example1 - -def ex2(): - import basic_example2 - -def ex3(): - import basic_example3 - -if __name__ == '__main__': - ex1() - ex2() - ex3() - Added: trunk/Lib/sandbox/pyem/examples/__init__.py =================================================================== Copied: trunk/Lib/sandbox/pyem/examples/basic_example1.py (from rev 3051, trunk/Lib/sandbox/pyem/doc/examples/basic_example1.py) Copied: trunk/Lib/sandbox/pyem/examples/basic_example2.py (from rev 3051, trunk/Lib/sandbox/pyem/doc/examples/basic_example2.py) Copied: trunk/Lib/sandbox/pyem/examples/basic_example3.py (from rev 3051, trunk/Lib/sandbox/pyem/doc/examples/basic_example3.py) Copied: trunk/Lib/sandbox/pyem/examples/examples.py (from rev 3051, trunk/Lib/sandbox/pyem/doc/examples/examples.py) Modified: trunk/Lib/sandbox/pyem/info.py =================================================================== --- trunk/Lib/sandbox/pyem/info.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/info.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -60,7 +60,7 @@ Copyright: David 
Cournapeau 2006 License: BSD-style (see LICENSE.txt in main source directory) """ -version = '0.5.6' +version = '0.5.7dev' depends = ['linalg', 'stats'] ignore = False Modified: trunk/Lib/sandbox/pyem/src/pure_den.c =================================================================== --- trunk/Lib/sandbox/pyem/src/pure_den.c 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/src/pure_den.c 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,5 +1,8 @@ /* - * Last Change: Wed Dec 06 08:00 PM 2006 J + * Last Change: Mon May 28 01:00 PM 2007 J + * + * Pure C module because ctypes cannot be used here for performance reasons + * (function calls are the primary bottleneck) */ #include #include Modified: trunk/Lib/sandbox/pyem/tests/test_densities.py =================================================================== --- trunk/Lib/sandbox/pyem/tests/test_densities.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/tests/test_densities.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,5 +1,5 @@ #! /usr/bin/env python -# Last Change: Thu Nov 09 05:00 PM 2006 J +# Last Change: Mon May 28 01:00 PM 2007 J # TODO: # - having "fake tests" to check that all mode (scalar, diag and full) are Modified: trunk/Lib/sandbox/pyem/tests/test_examples.py =================================================================== --- trunk/Lib/sandbox/pyem/tests/test_examples.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/tests/test_examples.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,10 +1,10 @@ #! /usr/bin/env python -# Last Change: Thu Nov 16 09:00 PM 2006 J +# Last Change: Mon May 28 04:00 PM 2007 J from numpy.testing import * set_package_path() -from pyem.examples import ex1, ex2, ex3 +from examples.examples import ex1, ex2, ex3 restore_path() # #Optional: Modified: trunk/Lib/sandbox/pyem/tests/test_online_em.py =================================================================== --- trunk/Lib/sandbox/pyem/tests/test_online_em.py 2007-05-28 01:44:11 UTC (rev 3052) +++ trunk/Lib/sandbox/pyem/tests/test_online_em.py 2007-05-28 07:14:50 UTC (rev 3053) @@ -1,5 +1,5 @@ #! /usr/bin/env python -# Last Change: Wed Dec 06 09:00 PM 2006 J +# Last Change: Mon May 28 01:00 PM 2007 J import copy From scipy-svn at scipy.org Mon May 28 07:24:27 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Mon, 28 May 2007 06:24:27 -0500 (CDT) Subject: [Scipy-svn] r3054 - in trunk/Lib/linalg: . tests Message-ID: <20070528112427.0F71A39C073@new.scipy.org> Author: stefan Date: 2007-05-28 06:24:04 -0500 (Mon, 28 May 2007) New Revision: 3054 Modified: trunk/Lib/linalg/decomp.py trunk/Lib/linalg/generic_flapack.pyf trunk/Lib/linalg/tests/test_decomp.py Log: Add rq decomposition [patch by Andrew Straw]. 
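For context, an RQ factorization writes a matrix A as the product R * Q, where R is upper triangular and Q is unitary (orthogonal for real input); it is the mirror image of the existing scipy.linalg.qr(), which returns the orthogonal factor on the left. The following minimal usage sketch is not part of the commit; it assumes a SciPy build that includes this revision, whose rq() accepts only real square matrices:

    import numpy
    from scipy.linalg import rq

    a = numpy.random.rand(4, 4)   # real and square: the only case supported here
    r, q = rq(a)

    # r is upper triangular, q is orthogonal, and r * q reconstructs a
    assert numpy.allclose(r, numpy.triu(r))
    assert numpy.allclose(numpy.dot(q, numpy.transpose(q)), numpy.eye(4))
    assert numpy.allclose(numpy.dot(r, q), a)

As with qr(), the lwork argument can normally be left at its default; per the docstring in the diff below, passing lwork=-1 asks the underlying LAPACK routine (gerqf) for the optimal workspace size.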
Modified: trunk/Lib/linalg/decomp.py =================================================================== --- trunk/Lib/linalg/decomp.py 2007-05-28 07:14:50 UTC (rev 3053) +++ trunk/Lib/linalg/decomp.py 2007-05-28 11:24:04 UTC (rev 3054) @@ -7,9 +7,10 @@ # additions by Eric Jones, June 2002 # additions by Johannes Loehnert, June 2006 # additions by Bart Vandereycken, June 2006 +# additions by Andrew D Straw, May 2007 __all__ = ['eig','eigh','eig_banded','eigvals','eigvalsh', 'eigvals_banded', - 'lu','svd','svdvals','diagsvd','cholesky','qr','qr_old', + 'lu','svd','svdvals','diagsvd','cholesky','qr','qr_old','rq', 'schur','rsf2csf','lu_factor','cho_factor','cho_solve','orth', 'hessenberg'] @@ -23,7 +24,7 @@ from scipy.linalg import calc_lwork import numpy from numpy import array, asarray_chkfinite, asarray, diag, zeros, ones, \ - single, isfinite, inexact + single, isfinite, inexact, complexfloating cast = numpy.cast r_ = numpy.r_ @@ -592,8 +593,8 @@ Description: - Find a unitary matrix, q, and an upper-trapezoidal matrix r - such that q * r = a + Find a unitary (orthogonal) matrix, q, and an upper-triangular + matrix r such that q * r = a Inputs: @@ -675,8 +676,8 @@ Description: - Find a unitary matrix, q, and an upper-trapezoidal matrix r - such that q * r = a + Find a unitary (orthogonal) matrix, q, and an upper-triangular + matrix r such that q * r = a Inputs: @@ -719,6 +720,64 @@ Q = gemm(1,Q,H) return Q, R + + +def rq(a,overwrite_a=0,lwork=None): + """RQ decomposition of an M x N matrix a. + + Description: + + Find an upper-triangular matrix r and a unitary (orthogonal) + matrix q such that r * q = a + + Inputs: + + a -- the matrix + overwrite_a=0 -- if non-zero then discard the contents of a, + i.e. a is used as a work array if possible. + + lwork=None -- >= shape(a)[1]. If None (or -1) compute optimal + work array size. + + Outputs: + + r, q -- matrices such that r * q = a + + """ + # TODO: implement support for non-square and complex arrays + a1 = asarray_chkfinite(a) + if len(a1.shape) != 2: + raise ValueError, 'expected matrix' + M,N = a1.shape + if M != N: + raise ValueError, 'expected square matrix' + if issubclass(a1.dtype.type,complexfloating): + raise ValueError, 'expected real (non-complex) matrix' + overwrite_a = overwrite_a or (_datanotshared(a1,a)) + gerqf, = get_lapack_funcs(('gerqf',),(a1,)) + if lwork is None or lwork == -1: + # get optimal work array + rq,tau,work,info = gerqf(a1,lwork=-1,overwrite_a=1) + lwork = work[0] + rq,tau,work,info = gerqf(a1,lwork=lwork,overwrite_a=overwrite_a) + if info<0: raise ValueError, \ + 'illegal value in %-th argument of internal geqrf'%(-info) + gemm, = get_blas_funcs(('gemm',),(rq,)) + t = rq.dtype.char + R = basic.triu(rq) + Q = numpy.identity(M,dtype=t) + ident = numpy.identity(M,dtype=t) + zeros = numpy.zeros + + k = min(M,N) + for i in range(k): + v = zeros((M,),t) + v[N-k+i] = 1 + v[0:N-k+i] = rq[M-k+i,0:N-k+i] + H = gemm(-tau[i],v,v,1+0j,ident,trans_b=2) + Q = gemm(1,Q,H) + return R, Q + _double_precision = ['i','l','d'] def schur(a,output='real',lwork=None,overwrite_a=0): Modified: trunk/Lib/linalg/generic_flapack.pyf =================================================================== --- trunk/Lib/linalg/generic_flapack.pyf 2007-05-28 07:14:50 UTC (rev 3053) +++ trunk/Lib/linalg/generic_flapack.pyf 2007-05-28 11:24:04 UTC (rev 3054) @@ -439,6 +439,25 @@ integer intent(out) :: info end subroutine geqrf + subroutine gerqf(m,n,a,tau,work,lwork,info) + + ! rq_a,tau,work,info = gerqf(a,lwork=3*n,overwrite_a=0) + ! 
Compute an RQ factorization of a real M-by-N matrix A: + ! A = R * Q. + + callstatement (*f2py_func)(&m,&n,a,&m,tau,work,&lwork,&info) + callprotoargument int*,int*,*,int*,*,*,int*,int* + + integer intent(hide),depend(a):: m = shape(a,0) + integer intent(hide),depend(a):: n = shape(a,1) + dimension(m,n),intent(in,out,copy,out=qr) :: a + dimension(MIN(m,n)),intent(out) :: tau + + integer optional,intent(in),depend(n),check(lwork>=n||lwork==-1) :: lwork=3*n + dimension(MAX(lwork,1)),intent(out),depend(lwork) :: work + integer intent(out) :: info + end subroutine gerqf + subroutine orgqr(m,n,k,a,tau,work,lwork,info) ! q,work,info = orgqr(a,lwork=3*n,overwrite_a=0) Modified: trunk/Lib/linalg/tests/test_decomp.py =================================================================== --- trunk/Lib/linalg/tests/test_decomp.py 2007-05-28 07:14:50 UTC (rev 3053) +++ trunk/Lib/linalg/tests/test_decomp.py 2007-05-28 11:24:04 UTC (rev 3054) @@ -18,11 +18,11 @@ from numpy.testing import * set_package_path() -from linalg import eig,eigvals,lu,svd,svdvals,cholesky,qr,schur,rsf2csf -from linalg import lu_solve,lu_factor,solve,diagsvd,hessenberg -from linalg import eig_banded,eigvals_banded -from linalg.flapack import dgbtrf, dgbtrs, zgbtrf, zgbtrs -from linalg.flapack import dsbev, dsbevd, dsbevx, zhbevd, zhbevx +from scipy.linalg import eig,eigvals,lu,svd,svdvals,cholesky,qr,schur,rsf2csf +from scipy.linalg import lu_solve,lu_factor,solve,diagsvd,hessenberg,rq +from scipy.linalg import eig_banded,eigvals_banded +from scipy.linalg.flapack import dgbtrf, dgbtrs, zgbtrf, zgbtrs +from scipy.linalg.flapack import dsbev, dsbevd, dsbevx, zhbevd, zhbevx restore_path() @@ -674,6 +674,68 @@ assert_array_almost_equal(dot(conj(transpose(q)),q),identity(n)) assert_array_almost_equal(dot(q,r),a) +class test_rq(NumpyTestCase): + + def check_simple(self): + a = [[8,2,3],[2,9,3],[5,3,6]] + r,q = rq(a) + assert_array_almost_equal(dot(transpose(q),q),identity(3)) + assert_array_almost_equal(dot(r,q),a) + + def check_random(self): + n = 20 + for k in range(2): + a = random([n,n]) + r,q = rq(a) + assert_array_almost_equal(dot(transpose(q),q),identity(n)) + assert_array_almost_equal(dot(r,q),a) + +# TODO: implement support for non-square and complex arrays + +## def check_simple_trap(self): +## a = [[8,2,3],[2,9,3]] +## r,q = rq(a) +## assert_array_almost_equal(dot(transpose(q),q),identity(2)) +## assert_array_almost_equal(dot(r,q),a) + +## def check_simple_tall(self): +## a = [[8,2],[2,9],[5,3]] +## r,q = rq(a) +## assert_array_almost_equal(dot(transpose(q),q),identity(3)) +## assert_array_almost_equal(dot(r,q),a) + +## def check_simple_complex(self): +## a = [[3,3+4j,5],[5,2,2+7j],[3,2,7]] +## r,q = rq(a) +## assert_array_almost_equal(dot(conj(transpose(q)),q),identity(3)) +## assert_array_almost_equal(dot(r,q),a) + +## def check_random_tall(self): +## m = 200 +## n = 100 +## for k in range(2): +## a = random([m,n]) +## r,q = rq(a) +## assert_array_almost_equal(dot(transpose(q),q),identity(m)) +## assert_array_almost_equal(dot(r,q),a) + +## def check_random_trap(self): +## m = 100 +## n = 200 +## for k in range(2): +## a = random([m,n]) +## r,q = rq(a) +## assert_array_almost_equal(dot(transpose(q),q),identity(m)) +## assert_array_almost_equal(dot(r,q),a) + +## def check_random_complex(self): +## n = 20 +## for k in range(2): +## a = random([n,n])+1j*random([n,n]) +## r,q = rq(a) +## assert_array_almost_equal(dot(conj(transpose(q)),q),identity(n)) +## assert_array_almost_equal(dot(r,q),a) + transp = transpose any = sometrue @@ 
-741,7 +803,7 @@ class test_datanotshared(NumpyTestCase): def check_datanotshared(self): - from linalg.decomp import _datanotshared + from scipy.linalg.decomp import _datanotshared M = matrix([[0,1],[2,3]]) A = asarray(M) From scipy-svn at scipy.org Tue May 29 05:10:55 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 29 May 2007 04:10:55 -0500 (CDT) Subject: [Scipy-svn] r3055 - in trunk/Lib/special: . tests Message-ID: <20070529091055.E440739C104@new.scipy.org> Author: stefan Date: 2007-05-29 04:10:20 -0500 (Tue, 29 May 2007) New Revision: 3055 Modified: trunk/Lib/special/basic.py trunk/Lib/special/tests/test_basic.py Log: Fix special functions for real arguments (ticket #387). Modified: trunk/Lib/special/basic.py =================================================================== --- trunk/Lib/special/basic.py 2007-05-28 11:24:04 UTC (rev 3054) +++ trunk/Lib/special/basic.py 2007-05-29 09:10:20 UTC (rev 3055) @@ -216,7 +216,7 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)): + if iscomplex(z): nm,jn,jnp,yn,ynp = specfun.csphjy(n1,z) else: nm,jn,jnp = specfun.sphj(n1,z) @@ -232,7 +232,8 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)) or any(z<0): + print z + if iscomplex(z) or less(z,0): nm,jn,jnp,yn,ynp = specfun.csphjy(n1,z) else: nm,yn,ynp = specfun.sphy(n1,z) @@ -248,7 +249,7 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)) or any(z<0): + if iscomplex(z) or less(z,0): nm,jn,jnp,yn,ynp = specfun.csphjy(n1,z) else: nm,yn,ynp = specfun.sphy(n1,z) @@ -265,7 +266,7 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)): + if iscomplex(z): nm,In,Inp,kn,knp = specfun.csphik(n1,z) else: nm,In,Inp = specfun.sphi(n1,z) @@ -281,7 +282,7 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)) or any(z<0): + if iscomplex(z) or less(z,0): nm,In,Inp,kn,knp = specfun.csphik(n1,z) else: nm,kn,knp = specfun.sphk(n1,z) @@ -295,7 +296,7 @@ raise ValueError, "arguments must be scalars." if (n!= floor(n)) or (n<0): raise ValueError, "n must be a non-negative integer." - if any(iscomplex(z)) or any(z<0): + if iscomplex(z) or less(z,0): nm,In,Inp,kn,knp = specfun.csphik(n,z) else: nm,In,Inp = specfun.sphi(n,z) @@ -508,7 +509,7 @@ sv = errprint(sv) else: mp = m - if any(iscomplex(z)): + if iscomplex(z): p,pd = specfun.clpmn(mp,n,real(z),imag(z)) else: p,pd = specfun.lpmn(mp,n,z) @@ -538,7 +539,7 @@ if (m*n == 0): mm = max(1,m) nn = max(1,n) - if any(iscomplex(z)): + if iscomplex(z): q,qd = specfun.clqmn(mm,nn,z) else: q,qd = specfun.lqmn(mm,nn,z) @@ -577,7 +578,7 @@ raise ValueError, "n must be a non-negative integer." if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)): + if iscomplex(z): pn,pd = specfun.clpn(n1,z) else: pn,pd = specfun.lpn(n1,z) @@ -595,7 +596,7 @@ raise ValueError, "n must be a non-negative integer." 
if (n < 1): n1 = 1 else: n1 = n - if any(iscomplex(z)): + if iscomplex(z): qn,qd = specfun.clqn(n1,z) else: qn,qd = specfun.lqnb(n1,z) Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-28 11:24:04 UTC (rev 3054) +++ trunk/Lib/special/tests/test_basic.py 2007-05-29 09:10:20 UTC (rev 3055) @@ -37,7 +37,7 @@ from numpy.testing import * set_package_path() from special import * -import special._cephes as cephes +import scipy.special._cephes as cephes restore_path() From scipy-svn at scipy.org Tue May 29 08:45:44 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 29 May 2007 07:45:44 -0500 (CDT) Subject: [Scipy-svn] r3056 - in trunk/Lib/special: . tests Message-ID: <20070529124544.CBFAE39C0C9@new.scipy.org> Author: stefan Date: 2007-05-29 07:45:16 -0500 (Tue, 29 May 2007) New Revision: 3056 Modified: trunk/Lib/special/basic.py trunk/Lib/special/specfun.pyf trunk/Lib/special/tests/test_basic.py Log: Fix memory error in special functions wrapper. Reorganise tests. Modified: trunk/Lib/special/basic.py =================================================================== --- trunk/Lib/special/basic.py 2007-05-29 09:10:20 UTC (rev 3055) +++ trunk/Lib/special/basic.py 2007-05-29 12:45:16 UTC (rev 3056) @@ -661,11 +661,11 @@ raise ValueError, "arguments must be scalars." n = int(v) v0 = v-n - if (n < 2): n1=2 + if (n < 1): n1=1 else: n1 = n v1 = n1 + v0 dv,dp,pdf,pdd = specfun.pbdv(v1,x) - return dv[:(n1+1)],dp[:(n1+1)] + return dv[:n1+1],dp[:n1+1] def pbvv_seq(v,x): """Compute sequence of parabolic cylinder functions Dv(x) and @@ -675,11 +675,11 @@ raise ValueError, "arguments must be scalars." n = int(v) v0 = v-n - if (n < 2): n1=2 + if (n <= 1): n1=1 else: n1 = n v1 = n1 + v0 dv,dp,pdf,pdd = specfun.pbvv(v1,x) - return dv[:(n1+1)],dp[:(n1+1)] + return dv[:n1+1],dp[:n1+1] def pbdn_seq(n,z): """Compute sequence of parabolic cylinder functions Dn(z) and @@ -690,11 +690,11 @@ if (floor(n)!=n): raise ValueError, "n must be an integer." if (abs(n) <= 1): - n1 = 2 + n1 = 1 else: n1 = n cpb,cpd = specfun.cpbdn(n1,z) - return cpb[:n+1],cpd[:n+1] + return cpb[:n1+1],cpd[:n1+1] def ber_zeros(nt): """Compute nt zeros of the kelvin function ber x Modified: trunk/Lib/special/specfun.pyf =================================================================== --- trunk/Lib/special/specfun.pyf 2007-05-29 09:10:20 UTC (rev 3055) +++ trunk/Lib/special/specfun.pyf 2007-05-29 12:45:16 UTC (rev 3056) @@ -184,10 +184,10 @@ ! jyna subroutine pbdv(v,x,dv,dp,pdf,pdd) ! in :specfun:specfun.f - double precision intent(in),check((abs((int)v)+1)>=2) :: v + double precision intent(in),check((abs((int)v)+2)>=2) :: v double precision intent(in) :: x - double precision intent(out),depend(v),dimension(abs((int)v)+1) :: dv - double precision intent(out),depend(v),dimension(abs((int)v)+1) :: dp + double precision intent(out),depend(v),dimension(abs((int)v)+2) :: dv + double precision intent(out),depend(v),dimension(abs((int)v)+2) :: dp double precision intent(out) :: pdf double precision intent(out) :: pdd end subroutine pbdv @@ -307,10 +307,10 @@ ! dvla ! ik01a subroutine cpbdn(n,z,cpb,cpd) ! 
in :specfun:specfun.f - integer intent(in), check((abs(n)) > 1) :: n + integer intent(in), check((abs(n)) >= 1) :: n complex*16 intent(in) :: z - complex*16 depend(n), intent(out), dimension(abs(n)+1) :: cpb - complex*16 depend(n), intent(out), dimension(abs(n)+1) :: cpd + complex*16 depend(n), intent(out), dimension(abs(n)+2) :: cpb + complex*16 depend(n), intent(out), dimension(abs(n)+2) :: cpd end subroutine cpbdn ! ik01b ! beta @@ -447,10 +447,10 @@ ! enxa ! gaih subroutine pbvv(v,x,vv,vp,pvf,pvd) ! in :specfun:specfun.f - double precision intent(in), check((abs((int)v)+1)>=2) :: v + double precision intent(in), check((abs((int)v)+2)>=2) :: v double precision intent(in) :: x - double precision intent(out),depend(v),dimension(abs((int)v)+1) :: vv - double precision intent(out),depend(v),dimension(abs((int)v)+1) :: vp + double precision intent(out),depend(v),dimension(abs((int)v)+2) :: vv + double precision intent(out),depend(v),dimension(abs((int)v)+2) :: vp double precision intent(out) :: pvf double precision intent(out) :: pvd end subroutine pbvv Modified: trunk/Lib/special/tests/test_basic.py =================================================================== --- trunk/Lib/special/tests/test_basic.py 2007-05-29 09:10:20 UTC (rev 3055) +++ trunk/Lib/special/tests/test_basic.py 2007-05-29 12:45:16 UTC (rev 3056) @@ -40,7 +40,6 @@ import scipy.special._cephes as cephes restore_path() - class test_cephes(NumpyTestCase): def check_airy(self): cephes.airy(0) @@ -458,7 +457,6 @@ cephes.wofz(0) class test_airy(NumpyTestCase): - def check_airy(self): #This tests the airy function to ensure 8 place accuracy in computation @@ -469,8 +467,6 @@ x = airy(-.36) assert_array_almost_equal(x,array([0.44508477,-0.23186773,0.44939534,0.48105354]),8) -class test_airye(NumpyTestCase): - def check_airye(self): a = airye(0.01) b = airy(0.01) @@ -481,27 +477,14 @@ b1[n] = b[n]*exp(-abs(real(2.0/3.0*0.01*sqrt(0.01)))) assert_array_almost_equal(a,b1,6) -class test_arange(NumpyTestCase): + def check_bi_zeros(self): + bi = bi_zeros(2) + bia = (array([-1.17371322, -3.2710930]), + array([-2.29443968, -4.07315509]), + array([-0.45494438, 0.39652284]), + array([ 0.60195789 , -0.76031014])) + assert_array_almost_equal(bi,bia,4) - def check_arange(self): - numstring = arange(0,2.21,.1) - assert_almost_equal(numstring,array([0.,0.1,0.2,0.3, - 0.4,0.5,0.6,0.7, - 0.8,0.9,1.,1.1, - 1.2,1.3,1.4,1.5, - 1.6,1.7,1.8,1.9, - 2.,2.1,2.2])) - numstringa = arange(3,4,.3) - assert_array_almost_equal(numstringa, array([3.,3.3,3.6,3.9])) - numstringb = arange(3,27,3) - assert_array_equal(numstringb,array([3,6,9,12, - 15,18,21,24])) - numstringc = arange(3.3,27,4) - assert_array_equal(numstringc,array([3.3,7.3,11.3,15.3, - 19.3,23.3])) - -class test_ai_zeros(NumpyTestCase): - def check_ai_zeros(self): ai = ai_zeros(1) assert_array_almost_equal(ai,(array([-2.33810741]), @@ -509,18 +492,7 @@ array([ 0.5357]), array([ 0.7012])),4) -class test_array(NumpyTestCase): - - def check_array(self): - x = array([1,2,3,4]) - y = array([1,2,3,4]) - z = x*y - assert_array_equal(z,array([1,4,9,16])) - a = arange(1,5,1) - assert_array_equal(a,x) - class test_assoc_laguerre(NumpyTestCase): - def check_assoc_laguerre(self): a1 = genlaguerre(11,1) a2 = assoc_laguerre(.2,11,1) @@ -529,36 +501,26 @@ assert_array_almost_equal(a2,a1(1),8) class test_besselpoly(NumpyTestCase): - def check_besselpoly(self): pass -class test_bei(NumpyTestCase): - +class test_kelvin(NumpyTestCase): def check_bei(self): mbei = bei(2) assert_almost_equal(mbei, 
0.9722916273066613,5)#this may not be exact -class test_beip(NumpyTestCase): - def check_beip(self): mbeip = beip(2) assert_almost_equal(mbeip,0.91701361338403631,5)#this may not be exact -class test_ber(NumpyTestCase): - def check_ber(self): mber = ber(2) assert_almost_equal(mber,0.75173418271380821,5)#this may not be exact -class test_berp(NumpyTestCase): - def check_berp(self): mberp = berp(2) assert_almost_equal(mberp,-0.49306712470943909,5)#this may not be exact -class test_bei_zeros(NumpyTestCase): - def check_bei_zeros(self): bi = bi_zeros(5) assert_array_almost_equal(bi[0],array([-1.173713222709127, @@ -586,8 +548,6 @@ 0.929983638568022]),11) -class test_beip_zeros(NumpyTestCase): - def check_beip_zeros(self): bip = beip_zeros(5) assert_array_almost_equal(bip,array([ 3.772673304934953, @@ -595,7 +555,6 @@ 12.742147523633703, 17.193431752512542, 21.641143941167325]),4) -class test_ber_zeros(NumpyTestCase): def check_ber_zeros(self): ber = ber_zeros(5) @@ -605,8 +564,119 @@ 16.11356, 20.55463]),4) -class test_bernoulli(NumpyTestCase): + def check_berp_zeros(self): + brp = berp_zeros(5) + assert_array_almost_equal(brp,array([6.03871, + 10.51364, + 14.96844, + 19.41758, + 23.86430]),4) + def check_kelvin(self): + mkelv = kelvin(2) + assert_array_almost_equal(mkelv,(ber(2)+bei(2)*1j, + ker(2)+kei(2)*1j, + berp(2)+beip(2)*1j, + kerp(2)+keip(2)*1j),8) + + def check_kei(self): + mkei = kei(2) + assert_almost_equal(mkei,-0.20240006776470432,5) + + def check_keip(self): + mkeip = keip(2) + assert_almost_equal(mkeip,0.21980790991960536,5) + + def check_ker(self): + mker = ker(2) + assert_almost_equal(mker,-0.041664513991509472,5) + + def check_kerp(self): + mkerp = kerp(2) + assert_almost_equal(mkerp,-0.10660096588105264,5) + + def check_kei_zeros(self): + kei = kei_zeros(5) + assert_array_almost_equal(kei,array([ 3.91467, + 8.34422, + 12.78256, + 17.22314, + 21.66464]),4) + + def check_keip_zeros(self): + keip = keip_zeros(5) + assert_array_almost_equal(keip,array([ 4.93181, + 9.40405, + 13.85827, + 18.30717, + 22.75379]),4) + + + + # numbers come from 9.9 of A&S pg. 
381 + def check_kelvin_zeros(self): + tmp = kelvin_zeros(5) + berz,beiz,kerz,keiz,berpz,beipz,kerpz,keipz = tmp + assert_array_almost_equal(berz,array([ 2.84892, + 7.23883, + 11.67396, + 16.11356, + 20.55463]),4) + assert_array_almost_equal(beiz,array([ 5.02622, + 9.45541, + 13.89349, + 18.33398, + 22.77544]),4) + assert_array_almost_equal(kerz,array([ 1.71854, + 6.12728, + 10.56294, + 15.00269, + 19.44382]),4) + assert_array_almost_equal(keiz,array([ 3.91467, + 8.34422, + 12.78256, + 17.22314, + 21.66464]),4) + assert_array_almost_equal(berpz,array([ 6.03871, + 10.51364, + 14.96844, + 19.41758, + 23.86430]),4) + assert_array_almost_equal(beipz,array([ 3.77267, + # table from 1927 had 3.77320 + # but this is more accurate + 8.28099, + 12.74215, + 17.19343, + 21.64114]),4) + assert_array_almost_equal(kerpz,array([ 2.66584, + 7.17212, + 11.63218, + 16.08312, + 20.53068]),4) + assert_array_almost_equal(keipz,array([ 4.93181, + 9.40405, + 13.85827, + 18.30717, + 22.75379]),4) + + def check_ker_zeros(self): + ker = ker_zeros(5) + assert_array_almost_equal(ker,array([ 1.71854, + 6.12728, + 10.56294, + 15.00269, + 19.44381]),4) + + def check_kerp_zeros(self): + kerp = kerp_zeros(5) + assert_array_almost_equal(kerp,array([ 2.66584, + 7.17212, + 11.63218, + 16.08312, + 20.53068]),4) + +class test_bernoulli(NumpyTestCase): def check_bernoulli(self): brn = bernoulli(5) assert_array_almost_equal(brn,array([1.0000, @@ -616,54 +686,27 @@ -0.0333, 0.0000]),4) -class test_berp_zeros(NumpyTestCase): - - def check_berp_zeros(self): - brp = berp_zeros(5) - assert_array_almost_equal(brp,array([6.03871, - 10.51364, - 14.96844, - 19.41758, - 23.86430]),4) class test_beta(NumpyTestCase): - def check_beta(self): bet = beta(2,4) betg = (gamma(2)*gamma(4))/gamma(6) assert_almost_equal(bet,betg,8) -class test_betaln(NumpyTestCase): - def check_betaln(self): betln = betaln(2,4) bet = log(abs(beta(2,4))) assert_almost_equal(betln,bet,8) -class test_betainc(NumpyTestCase): - def check_betainc(self): btinc = betainc(1,1,.2) assert_almost_equal(btinc,0.2,8) -class test_betaincinv(NumpyTestCase): - def check_betaincinv(self): y = betaincinv(2,4,.5) comp = betainc(2,4,y) assert_almost_equal(comp,.5,5) -class test_bi_zeros(NumpyTestCase): - - def check_bi_zeros(self): - bi = bi_zeros(2) - bia = (array([-1.17371322, -3.2710930]), - array([-2.29443968, -4.07315509]), - array([-0.45494438, 0.39652284]), - array([ 0.60195789 , -0.76031014])) - assert_array_almost_equal(bi,bia,4) - -class test_chebyc(NumpyTestCase): - +class test_cheby(NumpyTestCase): def check_chebyc(self): C0 = chebyc(0) C1 = chebyc(1) @@ -679,8 +722,6 @@ assert_array_almost_equal(C4.c,[1,0,-4,0,2],13) assert_array_almost_equal(C5.c,[1,0,-5,0,5,0],13) -class test_chebys(NumpyTestCase): - def check_chebys(self): S0 = chebys(0) S1 = chebys(1) @@ -695,8 +736,6 @@ assert_array_almost_equal(S4.c,[1,0,-3,0,1],13) assert_array_almost_equal(S5.c,[1,0,-4,0,3,0],13) -class test_chebyt(NumpyTestCase): - def check_chebyt(self): T0 = chebyt(0) T1 = chebyt(1) @@ -711,8 +750,6 @@ assert_array_almost_equal(T4.c,[8,0,-8,0,1],13) assert_array_almost_equal(T5.c,[16,0,-20,0,5,0],13) -class test_chebyu(NumpyTestCase): - def check_chebyu(self): U0 = chebyu(0) U1 = chebyu(1) @@ -727,15 +764,7 @@ assert_array_almost_equal(U4.c,[16,0,-12,0,1],13) assert_array_almost_equal(U5.c,[32,0,-32,0,6,0],13) -class test_choose(NumpyTestCase): - - def check_choose(self): - carray = [1,3,2,4,6,5] - chose = choose([1,3,5],carray) - assert_array_equal(chose,array([3,4,5])) - -class 
test_cbrt(NumpyTestCase): - +class test_trigonometric(NumpyTestCase): def check_cbrt(self): cb = cbrt(27) cbrl = 27**(1.0/3.0) @@ -746,8 +775,6 @@ cbrl1 = 27.9**(1.0/3.0) assert_almost_equal(cb1,cbrl1,8) -class test_cosdg(NumpyTestCase): - def check_cosdg(self): cdg = cosdg(90) cdgrl = cos(pi/2.0) @@ -758,15 +785,11 @@ cdgmrl = cos(pi/6.0) assert_almost_equal(cdgm,cdgmrl,8) -class test_cosm1(NumpyTestCase): - def check_cosm1(self): cs = (cosm1(0),cosm1(.3),cosm1(pi/10)) csrl = (cos(0)-1,cos(.3)-1,cos(pi/10)-1) assert_array_almost_equal(cs,csrl,8) -class test_cotdg(NumpyTestCase): - def check_cotdg(self): ct = cotdg(30) ctrl = tan(pi/6.0)**(-1) @@ -792,21 +815,66 @@ assert_almost_equal(cotdg(-315), 1.0, 14) assert_almost_equal(cotdg(765), 1.0, 14) -class test_ellipj(NumpyTestCase): + def check_sinc(self): + c = arange(-2,2,.1) + y = sinc(c) + yre = sin(pi*c)/(pi*c) + yre[20] = 1.0 + assert_array_almost_equal(y, yre, 4) + def check_0(self): + x = 0.0 + assert_equal(sinc(x),1.0) + def check_sindg(self): + sn = sindg(90) + assert_equal(sn,1.0) + + def check_sindgmore(self): + snm = sindg(30) + snmrl = sin(pi/6.0) + assert_almost_equal(snm,snmrl,8) + snm1 = sindg(45) + snmrl1 = sin(pi/4.0) + assert_almost_equal(snm1,snmrl1,8) + +class test_tandg(NumpyTestCase): + + def check_tandg(self): + tn = tandg(30) + tnrl = tan(pi/6.0) + assert_almost_equal(tn,tnrl,8) + + def check_tandgmore(self): + tnm = tandg(45) + tnmrl = tan(pi/4.0) + assert_almost_equal(tnm,tnmrl,8) + tnm1 = tandg(60) + tnmrl1 = tan(pi/3.0) + assert_almost_equal(tnm1,tnmrl1,8) + + def check_specialpoints(self): + assert_almost_equal(tandg(0), 0.0, 14) + assert_almost_equal(tandg(45), 1.0, 14) + assert_almost_equal(tandg(-45), -1.0, 14) + assert_almost_equal(tandg(135), -1.0, 14) + assert_almost_equal(tandg(-135), 1.0, 14) + assert_almost_equal(tandg(180), 0.0, 14) + assert_almost_equal(tandg(-180), 0.0, 14) + assert_almost_equal(tandg(225), 1.0, 14) + assert_almost_equal(tandg(-225), -1.0, 14) + assert_almost_equal(tandg(315), -1.0, 14) + assert_almost_equal(tandg(-315), 1.0, 14) + +class test_ellip(NumpyTestCase): def check_ellipj(self): el = ellipj(0.2,0) rel = [sin(0.2),cos(0.2),1.0,0.20] assert_array_almost_equal(el,rel,13) -class test_ellipk(NumpyTestCase): - def check_ellipk(self): elk = ellipk(.2) assert_almost_equal(elk,1.659623598610528,11) -class test_ellipkinc(NumpyTestCase): - def check_ellipkinc(self): elkinc = ellipkinc(pi/2,.2) elk = ellipk(0.2) @@ -818,15 +886,10 @@ assert_almost_equal(elkinc,0.79398143,8) # From pg. 
614 of A & S - -class test_ellipe(NumpyTestCase): - def check_ellipe(self): ele = ellipe(.2) assert_almost_equal(ele,1.4890350580958529,8) -class test_ellipeinc(NumpyTestCase): - def check_ellipeinc(self): eleinc = ellipeinc(pi/2,.2) ele = ellipe(0.2) @@ -844,8 +907,6 @@ er = erf(.25) assert_almost_equal(er,0.2763263902,8) -class test_erf_zeros(NumpyTestCase): - def check_erf_zeros(self): erz = erf_zeros(5) erzr= array([1.45061616+1.88094300j, @@ -855,20 +916,14 @@ 3.76900557+4.06069723j]) assert_array_almost_equal(erz,erzr,4) -class test_erfcinv(NumpyTestCase): - def check_erfcinv(self): i = erfcinv(1) assert_equal(i,0) -class test_erfinv(NumpyTestCase): - def check_erfinv(self): i = erfinv(0) assert_equal(i,0) -class test_errprint(NumpyTestCase): - def check_errprint(self): a = errprint() b = 1-a #a is the state 1-a inverts state @@ -879,7 +934,6 @@ #assert_equal(d,1-a) class test_euler(NumpyTestCase): - def check_euler(self): eu0 = euler(0) eu1 = euler(1) @@ -901,8 +955,7 @@ errmax = max(err) assert_almost_equal(errmax, 0.0, 14) -class test_exp2(NumpyTestCase): - +class test_exp(NumpyTestCase): def check_exp2(self): ex = exp2(2) exrl = 2**2 @@ -913,8 +966,6 @@ exmrl = 2**(2.5) assert_almost_equal(exm,exmrl,8) -class test_exp10(NumpyTestCase): - def check_exp10(self): ex = exp10(2) exrl = 10**2 @@ -925,8 +976,6 @@ exmrl = 10**(2.5) assert_almost_equal(exm,exmrl,8) -class test_expm1(NumpyTestCase): - def check_expm1(self): ex = (expm1(2),expm1(3),expm1(4)) exrl = (exp(2)-1,exp(3)-1,exp(4)-1) @@ -938,13 +987,10 @@ assert_array_almost_equal(ex1,exrl1,8) class test_fresnel(NumpyTestCase): - def check_fresnel(self): frs = array(fresnel(.5)) assert_array_almost_equal(frs,array([0.064732432859999287, 0.49234422587144644]),8) -class test_fresnel_zeros(NumpyTestCase): - # values from pg 329 Table 7.11 of A & S # slightly corrected in 4th decimal place def check_fresnel_zeros(self): @@ -978,47 +1024,40 @@ class test_gamma(NumpyTestCase): - def check_gamma(self): - gam = gamma(5) assert_equal(gam,24.0) -class test_gammaln(NumpyTestCase): - def check_gammaln(self): gamln = gammaln(3) lngam = log(gamma(3)) assert_almost_equal(gamln,lngam,8) -class test_gammainc(NumpyTestCase): - def check_gammainc(self): gama = gammainc(.5,.5) assert_almost_equal(gama,.7,1) -class test_gammaincc(NumpyTestCase): - def check_gammaincc(self): gicc = gammaincc(.5,.5) greal = 1 - gammainc(.5,.5) assert_almost_equal(gicc,greal,8) -class test_gammainccinv(NumpyTestCase): - def check_gammainccinv(self): gccinv = gammainccinv(.5,.5) gcinv = gammaincinv(.5,.5) assert_almost_equal(gccinv,gcinv,8) -class test_gammaincinv(NumpyTestCase): - def check_gammaincinv(self): y = gammaincinv(.4,.4) x = gammainc(.4,y) assert_almost_equal(x,0.4,1) -class test_hankel1(NumpyTestCase): + def check_rgamma(self): + rgam = rgamma(8) + rlgam = 1/gamma(8) + assert_almost_equal(rgam,rlgam,8) + +class test_hankel(NumpyTestCase): def check_negv(self): assert_almost_equal(hankel1(-3,2), -hankel1(3,2), 14) @@ -1027,7 +1066,6 @@ hankrl = (jv(1,.1)+yv(1,.1)*1j) assert_almost_equal(hank1,hankrl,8) -class test_hankel1e(NumpyTestCase): def check_negv(self): assert_almost_equal(hankel1e(-3,2), -hankel1e(3,2), 14) @@ -1036,7 +1074,6 @@ hankrle = hankel1(1,.1)*exp(-.1j) assert_almost_equal(hank1e,hankrle,8) -class test_hankel2(NumpyTestCase): def check_negv(self): assert_almost_equal(hankel2(-3,2), -hankel2(3,2), 14) @@ -1045,7 +1082,6 @@ hankrl2 = (jv(1,.1)-yv(1,.1)*1j) assert_almost_equal(hank2,hankrl2,8) -class test_hankel2e(NumpyTestCase): def 
check_negv(self): assert_almost_equal(hankel2e(-3,2), -hankel2e(3,2), 14) @@ -1055,7 +1091,6 @@ assert_almost_equal(hank2e,hankrl2e,8) class test_hermite(NumpyTestCase): - def check_hermite(self): H0 = hermite(0) H1 = hermite(1) @@ -1118,44 +1153,30 @@ 0,15*poch(a,3),0])/15.0,11) -class test_h1vp(NumpyTestCase): - +class test_hyper(NumpyTestCase): def check_h1vp(self): - h1 = h1vp(1,.1) h1real = (jvp(1,.1)+yvp(1,.1)*1j) assert_almost_equal(h1,h1real,8) -class test_h2vp(NumpyTestCase): - def check_h2vp(self): h2 = h2vp(1,.1) h2real = (jvp(1,.1)-yvp(1,.1)*1j) assert_almost_equal(h2,h2real,8) -class test_hyp0f1(NumpyTestCase): - def check_hyp0f1(self): pass -class test_hyp1f1(NumpyTestCase): - def check_hyp1f1(self): - hyp1 = hyp1f1(.1,.1,.3) assert_almost_equal(hyp1, 1.3498588075760032,7) -class test_hyp1f2(NumpyTestCase): - def check_hyp1f2(self): pass -class test_hyp2f0(NumpyTestCase): - def check_hyp2f0(self): pass -class test_hyp2f1(NumpyTestCase): def check_hyp2f1(self): # a collection of special cases taken from AMS 55 values = [[0.5, 1, 1.5, 0.2**2, 0.5/0.2*log((1+0.2)/(1-0.2))], @@ -1179,13 +1200,9 @@ cv = hyp2f1(a, b, c, x) assert_almost_equal(cv, v, 8, err_msg='test #%d' % i) -class test_hyp3f0(NumpyTestCase): - def check_hyp3f0(self): pass -class test_hyperu(NumpyTestCase): - def check_hyperu(self): val1 = hyperu(1,0.1,100) assert_almost_equal(val1,0.0098153,7) @@ -1199,7 +1216,7 @@ /(gamma(a)*gamma(2-b))) assert_array_almost_equal(hypu,hprl,12) -class test_i0(NumpyTestCase): +class test_bessel(NumpyTestCase): def check_i0(self): values = [[0.0, 1.0], [1e-10, 1.0], @@ -1214,15 +1231,11 @@ cv = i0(x) * exp(-x) assert_almost_equal(cv, v, 8, err_msg='test #%d' % i) -class test_i0e(NumpyTestCase): - def check_i0e(self): oize = i0e(.1) oizer = ive(0,.1) assert_almost_equal(oize,oizer,8) -class test_i1(NumpyTestCase): - def check_i1(self): values = [[0.0, 0.0], [1e-10, 0.4999999999500000e-10], @@ -1236,38 +1249,27 @@ cv = i1(x) * exp(-x) assert_almost_equal(cv, v, 8, err_msg='test #%d' % i) -class test_i1e(NumpyTestCase): - def check_i1e(self): oi1e = i1e(.1) oi1er = ive(1,.1) assert_almost_equal(oi1e,oi1er,8) -class test_iti0k0(NumpyTestCase): - def check_iti0k0(self): iti0 = array(iti0k0(5)) assert_array_almost_equal(iti0,array([31.848667776169801, 1.5673873907283657]),5) -class test_it2i0k0(NumpyTestCase): - def check_it2i0k0(self): it2k = it2i0k0(.1) assert_array_almost_equal(it2k,array([0.0012503906973464409, 3.3309450354686687]),6) -class test_itj0y0(NumpyTestCase): - def check_itj0y0(self): it0 = array(itj0y0(.2)) assert_array_almost_equal(it0,array([0.19933433254006822, -0.34570883800412566]),8) -class test_it2j0y0(NumpyTestCase): - def check_it2j0y0(self): it2 = array(it2j0y0(.2)) assert_array_almost_equal(it2,array([0.0049937546274601858, -0.43423067011231614]),8) -class test_iv(NumpyTestCase): def check_negv(self): assert_equal(iv(3,2), iv(-3,2)) @@ -1275,7 +1277,6 @@ iv1 = iv(0,.1)*exp(-.1) assert_almost_equal(iv1,0.90710092578230106,10) -class test_ive(NumpyTestCase): def check_negv(self): assert_equal(ive(3,2), ive(-3,2)) @@ -1284,7 +1285,6 @@ iv1 = iv(0,.1)*exp(-.1) assert_almost_equal(ive1,iv1,10) -class test_ivp(NumpyTestCase): def check_ivp0(self): assert_almost_equal(iv(1,2), ivp(0,2), 10) @@ -1293,48 +1293,20 @@ x = ivp(1,2) assert_almost_equal(x,y,10) -class test_j0(NumpyTestCase): - def check_j0(self): oz = j0(.1) ozr = jn(0,.1) assert_almost_equal(oz,ozr,8) -class test_j1(NumpyTestCase): - def check_j1(self): o1 = j1(.1) o1r = jn(1,.1) 
assert_almost_equal(o1,o1r,8) -class test_jacobi(NumpyTestCase): - - def check_jacobi(self): - a = 5*rand() - 1 - b = 5*rand() - 1 - P0 = jacobi(0,a,b) - P1 = jacobi(1,a,b) - P2 = jacobi(2,a,b) - P3 = jacobi(3,a,b) - - assert_array_almost_equal(P0.c,[1],13) - assert_array_almost_equal(P1.c,array([a+b+2,a-b])/2.0,13) - cp = [(a+b+3)*(a+b+4), 4*(a+b+3)*(a+2), 4*(a+1)*(a+2)] - p2c = [cp[0],cp[1]-2*cp[0],cp[2]-cp[1]+cp[0]] - assert_array_almost_equal(P2.c,array(p2c)/8.0,13) - cp = [(a+b+4)*(a+b+5)*(a+b+6),6*(a+b+4)*(a+b+5)*(a+3), - 12*(a+b+4)*(a+2)*(a+3),8*(a+1)*(a+2)*(a+3)] - p3c = [cp[0],cp[1]-3*cp[0],cp[2]-2*cp[1]+3*cp[0],cp[3]-cp[2]+cp[1]-cp[0]] - assert_array_almost_equal(P3.c,array(p3c)/48.0,13) - - -class test_jn(NumpyTestCase): - def check_jn(self): jnnr = jn(1,.2) assert_almost_equal(jnnr,0.099500832639235995,8) -class test_jv(NumpyTestCase): def check_negv(self): assert_almost_equal(jv(-3,2), -jv(3,2), 14) @@ -1349,7 +1321,6 @@ yc = jv(v, x) assert_almost_equal(yc, y, 8, err_msg='test #%d' % i) -class test_jve(NumpyTestCase): def check_negv(self): assert_almost_equal(jve(-3,2), -jve(3,2), 14) @@ -1361,8 +1332,6 @@ jvexpr = jv(1,z)*exp(-abs(z.imag)) assert_almost_equal(jvexp1,jvexpr,8) -class test_jn_zeros(NumpyTestCase): - def check_jn_zeros(self): jn0 = jn_zeros(0,5) jn1 = jn_zeros(1,5) @@ -1377,8 +1346,6 @@ 13.32369, 16.47063]),4) -class test_jnjnp_zeros(NumpyTestCase): - def check_jnjnp_zeros(self): pass #jnjp = jnjnp(3) @@ -1386,8 +1353,6 @@ #I don't think specfun jdzo is working properly the outputs do not seem to correlate #to the inputs -class test_jnp_zeros(NumpyTestCase): - def check_jnp_zeros(self): jnp = jnp_zeros(1,5) assert_array_almost_equal(jnp, array([ 1.84118, @@ -1396,8 +1361,6 @@ 11.70600, 14.86359]),4) -class test_jnyn_zeros(NumpyTestCase): - def check_jnyn_zeros(self): jnz = jnyn_zeros(1,5) assert_array_almost_equal(jnz,(array([ 3.83171, @@ -1421,172 +1384,53 @@ 13.28576, 16.44006])),4) -class test_jvp(NumpyTestCase): - def check_jvp(self): jvprim = jvp(2,2) jv0 = (jv(1,2)-jv(3,2))/2 assert_almost_equal(jvprim,jv0,10) -class test_k0(NumpyTestCase): - def check_k0(self): ozk = k0(.1) ozkr = kv(0,.1) assert_almost_equal(ozk,ozkr,8) -class test_k0e(NumpyTestCase): - def check_k0e(self): ozke = k0e(.1) ozker = kve(0,.1) assert_almost_equal(ozke,ozker,8) -class test_k1(NumpyTestCase): - def check_k1(self): o1k = k1(.1) o1kr = kv(1,.1) assert_almost_equal(o1k,o1kr,8) -class test_k1e(NumpyTestCase): - def check_k1e(self): o1ke = k1e(.1) o1ker = kve(1,.1) assert_almost_equal(o1ke,o1ker,8) -class test_kei(NumpyTestCase): + def check_jacobi(self): + a = 5*rand() - 1 + b = 5*rand() - 1 + P0 = jacobi(0,a,b) + P1 = jacobi(1,a,b) + P2 = jacobi(2,a,b) + P3 = jacobi(3,a,b) - def check_kei(self): - mkei = kei(2) - assert_almost_equal(mkei,-0.20240006776470432,5) + assert_array_almost_equal(P0.c,[1],13) + assert_array_almost_equal(P1.c,array([a+b+2,a-b])/2.0,13) + cp = [(a+b+3)*(a+b+4), 4*(a+b+3)*(a+2), 4*(a+1)*(a+2)] + p2c = [cp[0],cp[1]-2*cp[0],cp[2]-cp[1]+cp[0]] + assert_array_almost_equal(P2.c,array(p2c)/8.0,13) + cp = [(a+b+4)*(a+b+5)*(a+b+6),6*(a+b+4)*(a+b+5)*(a+3), + 12*(a+b+4)*(a+2)*(a+3),8*(a+1)*(a+2)*(a+3)] + p3c = [cp[0],cp[1]-3*cp[0],cp[2]-2*cp[1]+3*cp[0],cp[3]-cp[2]+cp[1]-cp[0]] + assert_array_almost_equal(P3.c,array(p3c)/48.0,13) -class test_kelvin(NumpyTestCase): - - def check_kelvin(self): - mkelv = kelvin(2) - assert_array_almost_equal(mkelv,(ber(2)+bei(2)*1j, - ker(2)+kei(2)*1j, - berp(2)+beip(2)*1j, - kerp(2)+keip(2)*1j),8) - -class test_keip(NumpyTestCase): - - 
def check_keip(self): - mkeip = keip(2) - assert_almost_equal(mkeip,0.21980790991960536,5) - -class test_ker(NumpyTestCase): - - def check_ker(self): - mker = ker(2) - assert_almost_equal(mker,-0.041664513991509472,5) - -class test_kerp(NumpyTestCase): - - def check_kerp(self): - mkerp = kerp(2) - assert_almost_equal(mkerp,-0.10660096588105264,5) - -class test_kei_zeros(NumpyTestCase): - - def check_kei_zeros(self): - kei = kei_zeros(5) - assert_array_almost_equal(kei,array([ 3.91467, - 8.34422, - 12.78256, - 17.22314, - 21.66464]),4) - -class test_keip_zeros(NumpyTestCase): - - def check_keip_zeros(self): - keip = keip_zeros(5) - assert_array_almost_equal(keip,array([ 4.93181, - 9.40405, - 13.85827, - 18.30717, - 22.75379]),4) - - - -class test_kelvin_zeros(NumpyTestCase): - - # numbers come from 9.9 of A&S pg. 381 - def check_kelvin_zeros(self): - tmp = kelvin_zeros(5) - berz,beiz,kerz,keiz,berpz,beipz,kerpz,keipz = tmp - assert_array_almost_equal(berz,array([ 2.84892, - 7.23883, - 11.67396, - 16.11356, - 20.55463]),4) - assert_array_almost_equal(beiz,array([ 5.02622, - 9.45541, - 13.89349, - 18.33398, - 22.77544]),4) - assert_array_almost_equal(kerz,array([ 1.71854, - 6.12728, - 10.56294, - 15.00269, - 19.44382]),4) - assert_array_almost_equal(keiz,array([ 3.91467, - 8.34422, - 12.78256, - 17.22314, - 21.66464]),4) - assert_array_almost_equal(berpz,array([ 6.03871, - 10.51364, - 14.96844, - 19.41758, - 23.86430]),4) - assert_array_almost_equal(beipz,array([ 3.77267, - # table from 1927 had 3.77320 - # but this is more accurate - 8.28099, - 12.74215, - 17.19343, - 21.64114]),4) - assert_array_almost_equal(kerpz,array([ 2.66584, - 7.17212, - 11.63218, - 16.08312, - 20.53068]),4) - assert_array_almost_equal(keipz,array([ 4.93181, - 9.40405, - 13.85827, - 18.30717, - 22.75379]),4) - -class test_ker_zeros(NumpyTestCase): - - def check_ker_zeros(self): - ker = ker_zeros(5) - assert_array_almost_equal(ker,array([ 1.71854, - 6.12728, - 10.56294, - 15.00269, - 19.44381]),4) - -class test_kerp_zeros(NumpyTestCase): - - def check_kerp_zeros(self): - kerp = kerp_zeros(5) - assert_array_almost_equal(kerp,array([ 2.66584, - 7.17212, - 11.63218, - 16.08312, - 20.53068]),4) - -class test_kn(NumpyTestCase): - def check_kn(self): kn1 = kn(0,.2) assert_almost_equal(kn1,1.7527038555281462,8) -class test_kv(NumpyTestCase): def check_negv(self): assert_equal(kv(3.0, 2.2), kv(-3.0, 2.2)) @@ -1601,7 +1445,6 @@ assert_almost_equal(kv2, 49.51242928773287, 10) -class test_kve(NumpyTestCase): def check_negv(self): assert_equal(kve(3.0, 2.2), kve(-3.0, 2.2)) @@ -1614,7 +1457,6 @@ kv2 = kv(0,z)*exp(z) assert_almost_equal(kve2,kv2,8) -class test_kvp(NumpyTestCase): def check_kvp_v0n1(self): z = 2.2 assert_almost_equal(-kv(1,z), kvp(0,z, n=1), 10) @@ -1633,8 +1475,69 @@ x = kvp(v, z, n=2) assert_almost_equal(xc, x, 10) -class test_laguerre(NumpyTestCase): + def check_y0(self): + oz = y0(.1) + ozr = yn(0,.1) + assert_almost_equal(oz,ozr,8) + def check_y1(self): + o1 = y1(.1) + o1r = yn(1,.1) + assert_almost_equal(o1,o1r,8) + + def check_y0_zeros(self): + yo,ypo = y0_zeros(2) + zo,zpo = y0_zeros(2,complex=1) + all = r_[yo,zo] + allval = r_[ypo,zpo] + assert_array_almost_equal(abs(yv(0.0,all)),0.0,11) + assert_array_almost_equal(abs(yv(1,all)-allval),0.0,11) + + + def check_y1_zeros(self): + y1 = y1_zeros(1) + assert_array_almost_equal(y1,(array([2.19714]),array([0.52079])),5) + + def check_y1p_zeros(self): + y1p = y1p_zeros(1,complex=1) + assert_array_almost_equal(y1p,(array([ 0.5768+0.904j]), 
array([-0.7635+0.5892j])),3) + + def check_yn_zeros(self): + an = yn_zeros(4,2) + assert_array_almost_equal(an,array([ 5.64515, 9.36162]),5) + + def check_ynp_zeros(self): + ao = ynp_zeros(0,2) + assert_array_almost_equal(ao,array([ 2.19714133, 5.42968104]),6) + + def check_yn(self): + yn2n = yn(1,.2) + assert_almost_equal(yn2n,-3.3238249881118471,8) + + def check_negv(self): + assert_almost_equal(yv(-3,2), -yv(3,2), 14) + + def check_yv(self): + yv2 = yv(1,.2) + assert_almost_equal(yv2,-3.3238249881118471,8) + + def check_negv(self): + assert_almost_equal(yve(-3,2), -yve(3,2), 14) + + def check_yve(self): + yve2 = yve(1,.2) + assert_almost_equal(yve2,-3.3238249881118471,8) + yve2r = yv(1,.2+1j)*exp(-1) + yve22 = yve(1,.2+1j) + assert_almost_equal(yve22,yve2r,8) + + def check_yvp(self): + yvpr = (yv(1,.2) - yv(3,.2))/2.0 + yvp1 = yvp(2,.2) + assert_array_almost_equal(yvp1,yvpr,10) + + +class test_laguerre(NumpyTestCase): def check_laguerre(self): lag0 = laguerre(0) lag1 = laguerre(1) @@ -1663,7 +1566,6 @@ # Base polynomials come from Abrahmowitz and Stegan class test_legendre(NumpyTestCase): - def check_legendre(self): leg0 = legendre(0) leg1 = legendre(1) @@ -1679,8 +1581,7 @@ assert_almost_equal(leg5.c,array([63,0,-70,0,15,0])/8.0) -class test_lmbda(NumpyTestCase): - +class test_lambda(NumpyTestCase): def check_lmbda(self): lam = lmbda(1,.1) lamr = (array([jn(0,.1), 2*jn(1,.1)/.1]), @@ -1688,7 +1589,6 @@ assert_array_almost_equal(lam,lamr,8) class test_log1p(NumpyTestCase): - def check_log1p(self): l1p = (log1p(10),log1p(11),log1p(12)) l1prl = (log(11),log(12),log(13)) @@ -1699,10 +1599,8 @@ l1pmrl = (log(2),log(2.1),log(2.2)) assert_array_almost_equal(l1pm,l1pmrl,8) -class test_lpmn(NumpyTestCase): - +class test_legendre_functions(NumpyTestCase): def check_lpmn(self): - lp = lpmn(0,2,.5) assert_array_almost_equal(lp,(array([ [ 1.00000 , 0.50000, @@ -1711,8 +1609,6 @@ 1.00000 , 1.50000]])),4) -class test_lpn(NumpyTestCase): - def check_lpn(self): lpnf = lpn(2,.5) assert_array_almost_equal(lpnf,(array( [ 1.00000 , @@ -1722,14 +1618,10 @@ 1.00000 , 1.50000])),4) -class test_lpmv(NumpyTestCase): - def check_lpmv(self): lp = lpmv(0,2,.5) assert_almost_equal(lp,-0.125,3) -class test_lqmn(NumpyTestCase): - def check_lqmn(self): lqmnf = lqmn(0,2,.5) lqmnf = lqmn(0,2,.5) @@ -1737,45 +1629,33 @@ assert_array_almost_equal(lqmnf[0][0],lqf[0],4) assert_array_almost_equal(lqmnf[1][0],lqf[1],4) - - - -class test_lqn(NumpyTestCase): - def check_lqn(self): lqf = lqn(2,.5) assert_array_almost_equal(lqf,(array([ 0.5493, -0.7253, -0.8187]), array([ 1.3333, 1.216 , -0.8427])),4) -class test_mathieu_a(NumpyTestCase): +class test_mathieu(NumpyTestCase): def check_mathieu_a(self): pass -class test_mathieu_even_coef(NumpyTestCase): - def check_mathieu_even_coef(self): mc = mathieu_even_coef(2,5) #Q not defined broken and cannot figure out proper reporting order -class test_mathieu_odd_coef(NumpyTestCase): - def check_mathieu_odd_coef(self): pass #same problem as above -class test_modfresnelp(NumpyTestCase): +class test_fresnel_integral(NumpyTestCase): def check_modfresnelp(self): pass -class test_modfresnelm(NumpyTestCase): - def check_modfresnelm(self): pass class test_obl_cv_seq(NumpyTestCase): - def check_obl_cv_seq(self): obl = obl_cv_seq(0,3,1) assert_array_almost_equal(obl,array([ -0.348602, @@ -1783,8 +1663,7 @@ 5.486800, 11.492120]),5) -class test_pbdn_seq(NumpyTestCase): - +class test_parabolic_cylinder(NumpyTestCase): def check_pbdn_seq(self): pb = pbdn_seq(1,.1) assert_array_almost_equal(pb,(array([ 
0.9975, @@ -1792,26 +1671,16 @@ array([-0.0499, 0.9925])),4) -class test_pbdv(NumpyTestCase): - def check_pbdv(self): pbv = pbdv(1,.2) derrl = 1/2*(.2)*pbdv(1,.2)[0] - pbdv(0,.2)[0] -class _test_pbdv_seq(NumpyTestCase): - def check_pbdv_seq(self): pbn = pbdn_seq(1,.1) pbv = pbdv_seq(1,.1) assert_array_almost_equal(pbv,(real(pbn[0]),real(pbn[1])),4) -class test_pbvv_seq(NumpyTestCase): - - def check_pbvv_seq(self): - pass - class test_polygamma(NumpyTestCase): - # from Table 6.2 (pg. 271) of A&S def check_polygamma(self): poly2 = polygamma(2,1) @@ -1820,7 +1689,6 @@ assert_almost_equal(poly3,6.4939394023,10) class test_pro_cv_seq(NumpyTestCase): - def check_pro_cv_seq(self): prol = pro_cv_seq(0,3,1) assert_array_almost_equal(prol,array([ 0.319000, @@ -1829,13 +1697,11 @@ 12.514462]),5) class test_psi(NumpyTestCase): - def check_psi(self): ps = psi(1) assert_almost_equal(ps,-0.57721566490153287,8) class test_radian(NumpyTestCase): - def check_radian(self): rad = radian(90,0,0) assert_almost_equal(rad,pi/2.0,5) @@ -1844,41 +1710,18 @@ rad1 = radian(90,1,60) assert_almost_equal(rad1,pi/2+0.0005816135199345904,5) -class test_reshape(NumpyTestCase): - - def check_reshape(self): - a = (array([1,2,3]),array([4,5,6])) - b = reshape(a,(2,3)) - assert_array_equal(b,array([[1, 2, 3], - [4, 5, 6]])) - c = reshape(a,(3,2)) - assert_array_equal(c,array([[1, 2], - [3, 4], - [5, 6]])) - -class test_rgamma(NumpyTestCase): - - def check_rgamma(self): - rgam = rgamma(8) - rlgam = 1/gamma(8) - assert_almost_equal(rgam,rlgam,8) - -class test_riccati_jn(NumpyTestCase): - +class test_riccati(NumpyTestCase): def check_riccati_jn(self): jnrl = (sph_jn(1,.2)[0]*.2,sph_jn(1,.2)[0]+sph_jn(1,.2)[1]*.2) ricjn = riccati_jn(1,.2) assert_array_almost_equal(ricjn,jnrl,8) -class test_riccati_yn(NumpyTestCase): - def check_riccati_yn(self): ynrl = (sph_yn(1,.2)[0]*.2,sph_yn(1,.2)[0]+sph_yn(1,.2)[1]*.2) ricyn = riccati_yn(1,.2) assert_array_almost_equal(ricyn,ynrl,8) class test_round(NumpyTestCase): - def check_round(self): rnd = map(int,(round(10.1),round(10.4),round(10.5),round(10.6))) @@ -1992,41 +1835,10 @@ assert_array_almost_equal(G4.c,ge4.c,13) assert_array_almost_equal(G5.c,ge5.c,13) - -class test_sinc(NumpyTestCase): - - def check_sinc(self): - c = arange(-2,2,.1) - y = sinc(c) - yre = sin(pi*c)/(pi*c) - yre[20] = 1.0 - assert_array_almost_equal(y, yre, 4) - def check_0(self): - x = 0.0 - assert_equal(sinc(x),1.0) - -class test_sindg(NumpyTestCase): - - def check_sindg(self): - sn = sindg(90) - assert_equal(sn,1.0) - - def check_sindgmore(self): - snm = sindg(30) - snmrl = sin(pi/6.0) - assert_almost_equal(snm,snmrl,8) - snm1 = sindg(45) - snmrl1 = sin(pi/4.0) - assert_almost_equal(snm1,snmrl1,8) - -class test_sph_harm(NumpyTestCase): - +class test_spherical(NumpyTestCase): def check_sph_harm(self): pass - -class test_sph_in(NumpyTestCase): - def check_sph_in(self): i1n = sph_in(1,.2) inp0 = (i1n[0][1]) @@ -2035,15 +1847,11 @@ 0.066933714568029540839]),12) assert_array_almost_equal(i1n[1],[inp0,inp1],12) -class test_sph_inkn(NumpyTestCase): - def check_sph_inkn(self): spikn = r_[sph_in(1,.2)+sph_kn(1,.2)] inkn = r_[sph_inkn(1,.2)] assert_array_almost_equal(inkn,spikn,10) -class test_sph_jn(NumpyTestCase): - def check_sph_jn(self): s1 = sph_jn(2,.2) s10 = -s1[0][1] @@ -2054,15 +1862,11 @@ 0.0026590560795273856680],12) assert_array_almost_equal(s1[1],[s10,s11,s12],12) -class test_sph_jnyn(NumpyTestCase): - def check_sph_jnyn(self): jnyn = r_[sph_jn(1,.2) + sph_yn(1,.2)] # tuple addition jnyn1 = r_[sph_jnyn(1,.2)] 
assert_array_almost_equal(jnyn1,jnyn,9) -class test_sph_kn(NumpyTestCase): - def check_sph_kn(self): kn = sph_kn(2,.2) kn0 = -kn[0][1] @@ -2073,8 +1877,6 @@ 585.15696310385559829],12) assert_array_almost_equal(kn[1],[kn0,kn1,kn2],9) -class test_sph_yn(NumpyTestCase): - def check_sph_yn(self): sy1 = sph_yn(2,.2)[0][2] sy2 = sph_yn(0,.2)[0][0] @@ -2084,130 +1886,5 @@ sy3 = sph_yn(1,.2)[1][1] assert_almost_equal(sy3,sphpy,4) #compare correct derivative val. (correct =-system val). -class test_take(NumpyTestCase): - - def check_take(self): - a = array([0,1,2,3,4,5,6,7,8]) - tka = take(a,(0,4,5,8),axis=0) - assert_array_equal(tka,array([0,4,5,8])) - -class test_tandg(NumpyTestCase): - - def check_tandg(self): - tn = tandg(30) - tnrl = tan(pi/6.0) - assert_almost_equal(tn,tnrl,8) - - def check_tandgmore(self): - tnm = tandg(45) - tnmrl = tan(pi/4.0) - assert_almost_equal(tnm,tnmrl,8) - tnm1 = tandg(60) - tnmrl1 = tan(pi/3.0) - assert_almost_equal(tnm1,tnmrl1,8) - - def check_specialpoints(self): - assert_almost_equal(tandg(0), 0.0, 14) - assert_almost_equal(tandg(45), 1.0, 14) - assert_almost_equal(tandg(-45), -1.0, 14) - assert_almost_equal(tandg(135), -1.0, 14) - assert_almost_equal(tandg(-135), 1.0, 14) - assert_almost_equal(tandg(180), 0.0, 14) - assert_almost_equal(tandg(-180), 0.0, 14) - assert_almost_equal(tandg(225), 1.0, 14) - assert_almost_equal(tandg(-225), -1.0, 14) - assert_almost_equal(tandg(315), -1.0, 14) - assert_almost_equal(tandg(-315), 1.0, 14) - -class test_y0(NumpyTestCase): - - def check_y0(self): - oz = y0(.1) - ozr = yn(0,.1) - assert_almost_equal(oz,ozr,8) - -class test_y1(NumpyTestCase): - - def check_y1(self): - o1 = y1(.1) - o1r = yn(1,.1) - assert_almost_equal(o1,o1r,8) - -class test_y0_zeros(NumpyTestCase): - - def check_y0_zeros(self): - yo,ypo = y0_zeros(2) - zo,zpo = y0_zeros(2,complex=1) - all = r_[yo,zo] - allval = r_[ypo,zpo] - assert_array_almost_equal(abs(yv(0.0,all)),0.0,11) - assert_array_almost_equal(abs(yv(1,all)-allval),0.0,11) - - -class test_y1_zeros(NumpyTestCase): - - def check_y1_zeros(self): - y1 = y1_zeros(1) - assert_array_almost_equal(y1,(array([2.19714]),array([0.52079])),5) - -class test_y1p_zeros(NumpyTestCase): - - def check_y1p_zeros(self): - y1p = y1p_zeros(1,complex=1) - assert_array_almost_equal(y1p,(array([ 0.5768+0.904j]), array([-0.7635+0.5892j])),3) - -class test_yn_zeros(NumpyTestCase): - - def check_yn_zeros(self): - an = yn_zeros(4,2) - assert_array_almost_equal(an,array([ 5.64515, 9.36162]),5) - -class test_ynp_zeros(NumpyTestCase): - - def check_ynp_zeros(self): - ao = ynp_zeros(0,2) - assert_array_almost_equal(ao,array([ 2.19714133, 5.42968104]),6) - -class test_yn(NumpyTestCase): - - def check_yn(self): - yn2n = yn(1,.2) - assert_almost_equal(yn2n,-3.3238249881118471,8) - -class test_yv(NumpyTestCase): - def check_negv(self): - assert_almost_equal(yv(-3,2), -yv(3,2), 14) - - def check_yv(self): - yv2 = yv(1,.2) - assert_almost_equal(yv2,-3.3238249881118471,8) - -class test_yve(NumpyTestCase): - def check_negv(self): - assert_almost_equal(yve(-3,2), -yve(3,2), 14) - - def check_yve(self): - yve2 = yve(1,.2) - assert_almost_equal(yve2,-3.3238249881118471,8) - yve2r = yv(1,.2+1j)*exp(-1) - yve22 = yve(1,.2+1j) - assert_almost_equal(yve22,yve2r,8) - -class test_yvp(NumpyTestCase): - - def check_yvp(self): - yvpr = (yv(1,.2) - yv(3,.2))/2.0 - yvp1 = yvp(2,.2) - assert_array_almost_equal(yvp1,yvpr,10) - -class test_zeros(NumpyTestCase): - - def check_zeros(self): - b = zeros((1,11)) - assert_array_equal(b,array([[0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0]])) - c = zeros((2,2)) - assert_array_equal(c,array([[0, 0], - [0, 0]])) - if __name__ == "__main__": NumpyTest().run() From scipy-svn at scipy.org Tue May 29 13:00:00 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 29 May 2007 12:00:00 -0500 (CDT) Subject: [Scipy-svn] r3057 - trunk/Lib/stats Message-ID: <20070529170000.5A6E939C146@new.scipy.org> Author: edschofield Date: 2007-05-29 11:59:51 -0500 (Tue, 29 May 2007) New Revision: 3057 Modified: trunk/Lib/stats/stats.py Log: Grammar and indentation fixes for stats.py docstrings Modified: trunk/Lib/stats/stats.py =================================================================== --- trunk/Lib/stats/stats.py 2007-05-29 12:45:16 UTC (rev 3056) +++ trunk/Lib/stats/stats.py 2007-05-29 16:59:51 UTC (rev 3057) @@ -21,7 +21,6 @@ ################################################# ####### Written by: Gary Strangman ########### -####### Last modified: Apr 13, 2000 ########### ################################################# A collection of basic statistical functions for python. The function @@ -124,7 +123,8 @@ """ ## CHANGE LOG: ## =========== -## 29-11-05 ... fixed default axis to be 0 for consistency with scipy; +## since 2001-06-25 ... see scipy SVN changelog +## 05-11-29 ... fixed default axis to be 0 for consistency with scipy; ## cleanup of redundant imports, dead code, {0,1} -> booleans ## 02-02-10 ... require Numeric, eliminate "list-only" functions ## (only 1 set of functions now and no Dispatch class), @@ -861,8 +861,8 @@ Based on the D'Agostino and Pearson's test that combines skew and kurtosis to produce an omnibus test of normality. - D'Agostino, R. B. and Pearson, E. S. (1971), "An Omnibus Test of Normality for - Moderate and Large Sample Size," Biometrika, 58, 341-348 + D'Agostino, R. B. and Pearson, E. S. (1971), "An Omnibus Test of + Normality for Moderate and Large Sample Size," Biometrika, 58, 341-348 D'Agostino, R. B. and Pearson, E. S. (1973), "Testing for departures from Normality," Biometrika, 60, 613-622 @@ -1297,7 +1297,7 @@ Normalization is by (N-1) where N is the number of observations (unbiased estimate). If bias is True then normalization is by N. - If rowvar is False, then each row is a variables with + If rowvar is False, then each row is a variable with observations in the columns. """ m = asarray(m) @@ -1372,18 +1372,18 @@ """Calculates a Pearson correlation coefficient and the p-value for testing non-correlation. - The Pearson correlation coefficient measures the linear relationship between - two datasets. Strictly speaking, Pearson's correlation requires that each - dataset be normally distributed. Like other correlation coefficients, this - one varies between -1 and +1 with 0 implying no correlation. Correlations of - -1 or +1 imply an exact linear relationship. Positive correlations imply - that as x increases, so does y. Negative correlations imply that as - x increases, y decreases. + The Pearson correlation coefficient measures the linear relationship + between two datasets. Strictly speaking, Pearson's correlation requires + that each dataset be normally distributed. Like other correlation + coefficients, this one varies between -1 and +1 with 0 implying no + correlation. Correlations of -1 or +1 imply an exact linear + relationship. Positive correlations imply that as x increases, so does + y. Negative correlations imply that as x increases, y decreases. 
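For a concrete sense of the relationship the reworded paragraph describes, a minimal usage sketch of pearsonr (the sample values below are invented for illustration and are not taken from this changeset):

    from scipy.stats import pearsonr
    x = [1, 2, 3, 4, 5]
    y = [2, 4, 6, 8, 10]          # y grows exactly linearly with x
    r, p = pearsonr(x, y)         # r is essentially +1.0: an exact positive linear relationship
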
The p-value roughly indicates the probability of an uncorrelated system - producing datasets that have a Pearson correlation at least as extreme as - the one computed from these datasets. The p-values are not entirely reliable - but are probably reasonable for datasets larger than 500 or so. + producing datasets that have a Pearson correlation at least as extreme + as the one computed from these datasets. The p-values are not entirely + reliable but are probably reasonable for datasets larger than 500 or so. Parameters ---------- @@ -1431,15 +1431,16 @@ The Spearman correlation is a nonparametric measure of the linear relationship between two datasets. Unlike the Pearson correlation, the Spearman correlation does not assume that both datasets are normally - distributed. Like other correlation coefficients, this one varies between -1 - and +1 with 0 implying no correlation. Correlations of -1 or +1 imply an - exact linear relationship. Positive correlations imply that as x increases, - so does y. Negative correlations imply that as x increases, y decreases. + distributed. Like other correlation coefficients, this one varies + between -1 and +1 with 0 implying no correlation. Correlations of -1 or + +1 imply an exact linear relationship. Positive correlations imply that + as x increases, so does y. Negative correlations imply that as x + increases, y decreases. The p-value roughly indicates the probability of an uncorrelated system - producing datasets that have a Spearman correlation at least as extreme as - the one computed from these datasets. The p-values are not entirely reliable - but are probably reasonable for datasets larger than 500 or so. + producing datasets that have a Spearman correlation at least as extreme + as the one computed from these datasets. The p-values are not entirely + reliable but are probably reasonable for datasets larger than 500 or so. Parameters ---------- @@ -1487,10 +1488,11 @@ """Calculates a point biserial correlation coefficient and the associated p-value. - The point biserial correlation is used to measure the relationship between - a binary variable, x, and a continuous variable, y. Like other correlation - coefficients, this one varies between -1 and +1 with 0 implying no - correlation. Correlations of -1 or +1 imply a determinative relationship. + The point biserial correlation is used to measure the relationship + between a binary variable, x, and a continuous variable, y. Like other + correlation coefficients, this one varies between -1 and +1 with 0 + implying no correlation. Correlations of -1 or +1 imply a determinative + relationship. Parameters ---------- @@ -1569,13 +1571,12 @@ def linregress(*args): + """Calculates a regression line on two arrays, x and y, corresponding to + x,y pairs. If a single 2D array is passed, linregress finds dim with 2 + levels and splits data into x,y pairs along that dim. + + Returns: slope, intercept, r, two-tailed prob, stderr-of-the-estimate """ -Calculates a regression line on two arrays, x and y, corresponding to x,y -pairs. If a single 2D array is passed, linregress finds dim with 2 levels -and splits data into x,y pairs along that dim. - -Returns: slope, intercept, r, two-tailed prob, stderr-of-the-estimate -""" TINY = 1.0e-20 if len(args) == 1: # more than 1D array? args = asarray(args[0]) @@ -1633,14 +1634,12 @@ def ttest_ind(a, b, axis=0): + """Calculates the t-obtained T-test on TWO INDEPENDENT samples of scores + a, and b. From Numerical Recipies, p.483. 
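A minimal sketch of the two-independent-sample call this docstring describes (the numbers are invented for illustration):

    from scipy.stats import ttest_ind
    a = [2.1, 2.3, 1.9, 2.2, 2.0]
    b = [2.8, 3.0, 2.9, 3.1, 2.7]
    t, p = ttest_ind(a, b)        # t-value and two-tailed p-value for the two samples
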
Axis can equal None (ravel + array first), or an integer (the axis over which to operate on a and b). + + Returns: t-value, two-tailed p-value """ -Calculates the t-obtained T-test on TWO INDEPENDENT samples of scores -a, and b. From Numerical Recipies, p.483. Axis -can equal None (ravel array first), or an integer (the axis over -which to operate on a and b). - -Returns: t-value, two-tailed p-value -""" a, b, axis = _chk2_asarray(a, b, axis) x1 = mean(a,axis) x2 = mean(b,axis) @@ -1663,14 +1662,12 @@ def ttest_rel(a,b,axis=None): + """Calculates the t-obtained T-test on TWO RELATED samples of scores, a + and b. From Numerical Recipies, p.483. Axis can equal None (ravel array + first), or an integer (the axis over which to operate on a and b). + + Returns: t-value, two-tailed p-value """ -Calculates the t-obtained T-test on TWO RELATED samples of scores, a -and b. From Numerical Recipies, p.483. Axis -can equal None (ravel array first), or an integer (the axis over -which to operate on a and b). - -Returns: t-value, two-tailed p-value -""" a, b, axis = _chk2_asarray(a, b, axis) if len(a)!=len(b): raise ValueError, 'unequal length arrays' @@ -1698,11 +1695,10 @@ import scipy.stats import distributions def kstest(rvs, cdf, args=(), N=20): - """Return the D-value and the p-value for a - Kolmogorov-Smirnov test of the null that N RV's generated by - the rvs fits the cdf given the extra arguments. rvs - needs to accept the size= keyword if a function. rvs can also - be a vector of RVs. + """Return the D-value and the p-value for a Kolmogorov-Smirnov test of + the null that N RV's generated by the rvs fits the cdf given the extra + arguments. rvs needs to accept the size= keyword if a function. rvs + can also be a vector of RVs. cdf can be a function or a string indicating the distriubtion type. @@ -1729,14 +1725,13 @@ return D, distributions.ksone.sf(D,N) def chisquare(f_obs, f_exp=None): + """ Calculates a one-way chi square for array of observed frequencies + and returns the result. If no expected frequencies are given, the total + N is assumed to be equally distributed across all groups. + + Returns: chisquare-statistic, associated p-value """ -Calculates a one-way chi square for array of observed frequencies and returns -the result. If no expected frequencies are given, the total N is assumed to -be equally distributed across all groups. -Returns: chisquare-statistic, associated p-value -""" - f_obs = asarray(f_obs) k = len(f_obs) if f_exp is None: @@ -1747,13 +1742,12 @@ def ks_2samp(data1, data2): + """ Computes the Kolmogorov-Smirnof statistic on 2 samples. Modified + from Numerical Recipies in C, page 493. Returns KS D-value, prob. Not + ufunc- like. + + Returns: KS D-value, p-value """ -Computes the Kolmogorov-Smirnof statistic on 2 samples. Modified from -Numerical Recipies in C, page 493. Returns KS D-value, prob. Not ufunc- -like. - -Returns: KS D-value, p-value -""" data1, data2 = map(asarray, (data1, data2)) j1 = 0 # zeros(data1.shape[1:]) TRIED TO MAKE THIS UFUNC-LIKE j2 = 0 # zeros(data2.shape[1:]) @@ -1787,15 +1781,14 @@ def mannwhitneyu(x, y): + """Calculates a Mann-Whitney U statistic on the provided scores and + returns the result. Use only when the n in each condition is < 20 and + you have 2 independent samples of ranks. REMEMBER: Mann-Whitney U is + significant if the u-obtained is LESS THAN or equal to the critical + value of U. 
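As a usage sketch of the call documented here, with two small independent samples of ranks (invented for illustration):

    from scipy.stats import mannwhitneyu
    x = [1, 4, 5, 7, 8]
    y = [2, 3, 6, 9, 10]
    u, p = mannwhitneyu(x, y)     # u-statistic and one-tailed p-value, as the Returns line states
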
+ + Returns: u-statistic, one-tailed p-value (i.e., p(z(U))) """ -Calculates a Mann-Whitney U statistic on the provided scores and -returns the result. Use only when the n in each condition is < 20 and -you have 2 independent samples of ranks. REMEMBER: Mann-Whitney U is -significant if the u-obtained is LESS THAN or equal to the critical -value of U. - -Returns: u-statistic, one-tailed p-value (i.e., p(z(U))) -""" x = asarray(x) y = asarray(y) n1 = len(x) @@ -1816,14 +1809,13 @@ def tiecorrect(rankvals): + """Tie-corrector for ties in Mann Whitney U and Kruskal Wallis H tests. + See Siegel, S. (1956) Nonparametric Statistics for the Behavioral + Sciences. New York: McGraw-Hill. Code adapted from |Stat rankind.c + code. + + Returns: T correction factor for U or H """ -Tie-corrector for ties in Mann Whitney U and Kruskal Wallis H tests. -See Siegel, S. (1956) Nonparametric Statistics for the Behavioral -Sciences. New York: McGraw-Hill. Code adapted from |Stat rankind.c -code. - -Returns: T correction factor for U or H -""" sorted,posn = fastsort(asarray(rankvals)) n = len(sorted) T = 0.0 @@ -1841,12 +1833,11 @@ def ranksums(x, y): + """Calculates the rank sums statistic on the provided scores and + returns the result. + + Returns: z-statistic, two-tailed p-value """ -Calculates the rank sums statistic on the provided scores and returns -the result. - -Returns: z-statistic, two-tailed p-value -""" x,y = map(np.asarray, (x, y)) n1 = len(x) n2 = len(y) @@ -1863,14 +1854,13 @@ def kruskal(*args): - """ -The Kruskal-Wallis H-test is a non-parametric ANOVA for 2 or more -groups, requiring at least 5 subjects in each group. This function -calculates the Kruskal-Wallis H and associated p-value for 2 or more -independent samples. + """The Kruskal-Wallis H-test is a non-parametric ANOVA for 2 or more + groups, requiring at least 5 subjects in each group. This function + calculates the Kruskal-Wallis H and associated p-value for 2 or more + independent samples. -Returns: H-statistic (corrected for ties), associated p-value -""" + Returns: H-statistic (corrected for ties), associated p-value + """ assert len(args) >= 2, "Need at least 2 groups in stats.kruskal()" n = map(len,args) all = [] @@ -1897,16 +1887,15 @@ def friedmanchisquare(*args): + """Friedman Chi-Square is a non-parametric, one-way within-subjects + ANOVA. This function calculates the Friedman Chi-square test for + repeated measures and returns the result, along with the associated + probability value. It assumes 3 or more repeated measures. Only 3 + levels requires a minimum of 10 subjects in the study. Four levels + requires 5 subjects per level(??). + + Returns: chi-square statistic, associated p-value """ -Friedman Chi-Square is a non-parametric, one-way within-subjects -ANOVA. This function calculates the Friedman Chi-square test for -repeated measures and returns the result, along with the associated -probability value. It assumes 3 or more repeated measures. Only 3 -levels requires a minimum of 10 subjects in the study. Four levels -requires 5 subjects per level(??). - -Returns: chi-square statistic, associated p-value -""" k = len(args) if k < 3: raise ValueError, '\nLess than 3 levels. Friedman test not appropriate.\n' @@ -1928,8 +1917,8 @@ erfc = special.erfc def chisqprob(chisq, df): - """Returns the (1-tail) probability value associated with the provided chi-square - value and degrees of freedom. + """Returns the (1-tail) probability value associated with the provided + chi-square value and degrees of freedom. 
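A minimal sketch of the call (the chi-square value is a textbook critical value, not taken from this changeset):

    from scipy.stats import chisqprob
    p = chisqprob(3.84, 1)        # upper-tail probability; roughly 0.05 for chi-square = 3.84 with 1 df
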
Broadcasting rules apply. @@ -1978,15 +1967,15 @@ ##################################### def glm(data, para): - """ -Calculates a linear model fit ... anova/ancova/lin-regress/t-test/etc. Taken -from: + """Calculates a linear model fit ... + anova/ancova/lin-regress/t-test/etc. Taken from: + Peterson et al. Statistical limitations in functional neuroimaging I. Non-inferential methods and statistical models. Phil Trans Royal Soc Lond B 354: 1239-1260. - -Returns: statistic, p-value ??? -""" + + Returns: statistic, p-value ??? + """ if len(para) != len(data): print "data and para must be same length in aglm" return @@ -2011,11 +2000,9 @@ def f_value_wilks_lambda(ER, EF, dfnum, dfden, a, b): + """Calculation of Wilks lambda F-statistic for multivarite data, per + Maxwell & Delaney p.657. """ -Calculation of Wilks lambda F-statistic for multivarite data, per -Maxwell & Delaney p.657. - -""" if isinstance(ER, (int, float)): ER = array([[ER]]) if isinstance(EF, (int, float)): @@ -2030,26 +2017,24 @@ return n_um / d_en def f_value(ER, EF, dfR, dfF): - """ -Returns an F-statistic given the following: + """Returns an F-statistic given the following: ER = error associated with the null hypothesis (the Restricted model) EF = error associated with the alternate hypothesis (the Full model) dfR = degrees of freedom the Restricted model dfF = degrees of freedom associated with the Restricted model -""" + """ return ((ER-EF)/float(dfR-dfF) / (EF/float(dfF))) def f_value_multivariate(ER, EF, dfnum, dfden): - """ -Returns an F-statistic given the following: + """Returns an F-statistic given the following: ER = error associated with the null hypothesis (the Restricted model) EF = error associated with the alternate hypothesis (the Full model) dfR = degrees of freedom the Restricted model dfF = degrees of freedom associated with the Restricted model -where ER and EF are matrices from a multivariate F calculation. -""" + where ER and EF are matrices from a multivariate F calculation. + """ if isinstance(ER, (int, float)): ER = array([[ER]]) if isinstance(EF, (int, float)): @@ -2064,8 +2049,8 @@ ##################################### def ss(a, axis=0): - """Squares each value in the passed array, adds these squares, and returns the - result. + """Squares each value in the passed array, adds these squares, and + returns the result. Parameters ---------- @@ -2115,9 +2100,9 @@ def rankdata(a): """Ranks the data in a, dealing with ties appropriately. - Equal values are assigned a rank that is the average of the ranks that would - have been otherwise assigned to all of the values within that set. Ranks - begin at 1, not 0. + Equal values are assigned a rank that is the average of the ranks that + would have been otherwise assigned to all of the values within that set. + Ranks begin at 1, not 0. Example ------- From scipy-svn at scipy.org Tue May 29 13:46:06 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Tue, 29 May 2007 12:46:06 -0500 (CDT) Subject: [Scipy-svn] r3058 - trunk/Lib/special Message-ID: <20070529174606.3C64539C047@new.scipy.org> Author: cookedm Date: 2007-05-29 12:46:03 -0500 (Tue, 29 May 2007) New Revision: 3058 Modified: trunk/Lib/special/info.py Log: scipy.special.info.__doc__ converted to restructedtext, courtesy of Gael Varoquaux and the number #306. 
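One minimal way to read the converted text from an installed tree, assuming the file ships as the scipy.special.info module as its path suggests:

    from scipy.special import info
    print info.__doc__            # prints the restructuredtext listing shown in the diff that follows
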
Modified: trunk/Lib/special/info.py =================================================================== --- trunk/Lib/special/info.py 2007-05-29 16:59:51 UTC (rev 3057) +++ trunk/Lib/special/info.py 2007-05-29 17:46:03 UTC (rev 3058) @@ -1,338 +1,351 @@ """ -Special Functions -================= +Airy Functions +-------------- - Airy Functions +* airy -- Airy functions and their derivatives. +* airye -- Exponentially scaled Airy functions +* ai_zeros -- [+]Zeros of Airy functions Ai(x) and Ai'(x) +* bi_zeros -- [+]Zeros of Airy functions Bi(x) and Bi'(x) - airy -- Airy functions and their derivatives. - airye -- Exponentially scaled Airy functions - ai_zeros -- **Zeros of Airy functions Ai(x) and Ai'(x) - bi_zeros -- **Zeros of Airy functions Bi(x) and Bi'(x) +Elliptic Functions and Integrals +-------------------------------- - Elliptic Functions and Integrals +* ellipj -- Jacobian elliptic functions +* ellipk -- Complete elliptic integral of the first kind. +* ellipkinc -- Incomplete elliptic integral of the first kind. +* ellipe -- Complete elliptic integral of the second kind. +* ellipeinc -- Incomplete elliptic integral of the second kind. - ellipj -- Jacobian elliptic functions - ellipk -- Complete elliptic integral of the first kind. - ellipkinc -- Incomplete elliptic integral of the first kind. - ellipe -- Complete elliptic integral of the second kind. - ellipeinc -- Incomplete elliptic integral of the second kind. +Bessel Functions +---------------- - Bessel Functions +* jn -- Bessel function of integer order and real argument. +* jv -- Bessel function of real-valued order and complex argument. +* jve -- Exponentially scaled Bessel function. +* yn -- Bessel function of second kind (integer order). +* yv -- Bessel function of the second kind (real-valued order). +* yve -- Exponentially scaled Bessel function of the second kind. +* kn -- Modified Bessel function of the third kind (integer order). +* kv -- Modified Bessel function of the third kind (real order). +* kve -- Exponentially scaled modified Bessel function of the third kind. +* iv -- Modified Bessel function. +* ive -- Exponentially scaled modified Bessel function. +* hankel1 -- Hankel function of the first kind. +* hankel1e -- Exponentially scaled Hankel function of the first kind. +* hankel2 -- Hankel function of the second kind. +* hankel2e -- Exponentially scaled Hankel function of the second kind. +* lmbda -- [+]Sequence of lambda functions with arbitrary order v. - jn -- Bessel function of integer order and real argument. - jv -- Bessel function of real-valued order and complex argument. - jve -- Exponentially scaled Bessel function. - yn -- Bessel function of second kind (integer order). - yv -- Bessel function of the second kind (real-valued order). - yve -- Exponentially scaled Bessel function of the second kind. - kn -- Modified Bessel function of the third kind (integer order). - kv -- Modified Bessel function of the third kind (real order). - kve -- Exponentially scaled modified Bessel function of the - third kind. - iv -- Modified Bessel function. - ive -- Exponentially scaled modified Bessel function. - hankel1 -- Hankel function of the first kind. - hankel1e -- Exponentially scaled Hankel function of the first kind. - hankel2 -- Hankel function of the second kind. - hankel2e -- Exponentially scaled Hankel function of the second kind. +Zeros of Bessel Functions +......................... - lmbda -- **Sequence of lambda functions with arbitrary order v. 
+* jnjnp_zeros -- [+]Zeros of integer-order Bessel functions and derivatives sorted in order. +* jnyn_zeros -- [+]Zeros of integer-order Bessel functions and derivatives as separate arrays. +* jn_zeros -- [+]Zeros of Jn(x) +* jnp_zeros -- [+]Zeros of Jn'(x) +* yn_zeros -- [+]Zeros of Yn(x) +* ynp_zeros -- [+]Zeros of Yn'(x) +* y0_zeros -- [+]Complex zeros: Y0(z0)=0 and values of Y0'(z0) +* y1_zeros -- [+]Complex zeros: Y1(z1)=0 and values of Y1'(z1) +* y1p_zeros -- [+]Complex zeros of Y1'(z1')=0 and values of Y1(z1') - Zeros of Bessel Functions +Faster versions of common Bessel Functions +.......................................... - jnjnp_zeros -- **Zeros of integer-order Bessel functions and derivatives - sorted in order. - jnyn_zeros -- **Zeros of integer-order Bessel functions and derivatives - as separate arrays. - jn_zeros -- **Zeros of Jn(x) - jnp_zeros -- **Zeros of Jn'(x) - yn_zeros -- **Zeros of Yn(x) - ynp_zeros -- **Zeros of Yn'(x) - y0_zeros -- **Complex zeros: Y0(z0)=0 and values of Y0'(z0) - y1_zeros -- **Complex zeros: Y1(z1)=0 and values of Y1'(z1) - y1p_zeros -- **Complex zeros of Y1'(z1')=0 and values of Y1(z1') +* j0 -- Bessel function of order 0. +* j1 -- Bessel function of order 1. +* y0 -- Bessel function of second kind of order 0. +* y1 -- Bessel function of second kind of order 1. +* i0 -- Modified Bessel function of order 0. +* i0e -- Exponentially scaled modified Bessel function of order 0. +* i1 -- Modified Bessel function of order 1. +* i1e -- Exponentially scaled modified Bessel function of order 1. +* k0 -- Modified Bessel function of the third kind of order 0. +* k0e -- Exponentially scaled modified Bessel function of the third kind of order 0. +* k1 -- Modified Bessel function of the third kind of order 1. +* k1e -- Exponentially scaled modified Bessel function of the third kind of order 1. - Faster versions of common Bessel Functions. +Integrals of Bessel Functions +............................. - j0 -- Bessel function of order 0. - j1 -- Bessel function of order 1. - y0 -- Bessel function of second kind of order 0. - y1 -- Bessel function of second kind of order 1. - i0 -- Modified Bessel function of order 0. - i0e -- Exponentially scaled modified Bessel function of order 0. - i1 -- Modified Bessel function of order 1. - i1e -- Exponentially scaled modified Bessel function of order 1. - k0 -- Modified Bessel function of the third kind of order 0. - k0e -- Exponentially scaled modified Bessel function of the - third kind of order 0. - k1 -- Modified Bessel function of the third kind of order 1. - k1e -- Exponentially scaled modified Bessel function of the - third kind of order 1. +* itj0y0 -- Basic integrals of j0 and y0 from 0 to x. +* it2j0y0 -- Integrals of (1-j0(t))/t from 0 to x and y0(t)/t from x to inf. +* iti0k0 -- Basic integrals of i0 and k0 from 0 to x. +* it2i0k0 -- Integrals of (i0(t)-1)/t from 0 to x and k0(t)/t from x to inf. +* besselpoly -- Integral of a bessel function: Jv(2* a* x) * x[+]lambda from x=0 to 1. - Integrals of Bessel Functions. +Derivatives of Bessel Functions +............................... - itj0y0 -- Basic integrals of j0 and y0 from 0 to x. - it2j0y0 -- Integrals of (1-j0(t))/t from 0 to x and - y0(t)/t from x to inf. - iti0k0 -- Basic integrals of i0 and k0 from 0 to x. - it2i0k0 -- Integrals of (i0(t)-1)/t from 0 to x and - k0(t)/t from x to inf. - besselpoly -- Integral of a bessel function: Jv(2*a*x) * x**lambda - from x=0 to 1. 
+* jvp -- Nth derivative of Jv(v,z) +* yvp -- Nth derivative of Yv(v,z) +* kvp -- Nth derivative of Kv(v,z) +* ivp -- Nth derivative of Iv(v,z) +* h1vp -- Nth derivative of H1v(v,z) +* h2vp -- Nth derivative of H2v(v,z) - Derivatives of Bessel Functions. +Spherical Bessel Functions +.......................... - jvp -- Nth derivative of Jv(v,z) - yvp -- Nth derivative of Yv(v,z) - kvp -- Nth derivative of Kv(v,z) - ivp -- Nth derivative of Iv(v,z) - h1vp -- Nth derivative of H1v(v,z) - h2vp -- Nth derivative of H2v(v,z) +* sph_jn -- [+]Sequence of spherical Bessel functions, jn(z) +* sph_yn -- [+]Sequence of spherical Bessel functions, yn(z) +* sph_jnyn -- [+]Sequence of spherical Bessel functions, jn(z) and yn(z) +* sph_in -- [+]Sequence of spherical Bessel functions, in(z) +* sph_kn -- [+]Sequence of spherical Bessel functions, kn(z) +* sph_inkn -- [+]Sequence of spherical Bessel functions, in(z) and kn(z) - Spherical Bessel Functions +Ricatti-Bessel Functions +........................ - sph_jn -- **Sequence of spherical Bessel functions, jn(z) - sph_yn -- **Sequence of spherical Bessel functions, yn(z) - sph_jnyn -- **Sequence of spherical Bessel functions, jn(z) and yn(z) - sph_in -- **Sequence of spherical Bessel functions, in(z) - sph_kn -- **Sequence of spherical Bessel functions, kn(z) - sph_inkn -- **Sequence of spherical Bessel functions, in(z) and kn(z) +* riccati_jn -- [+]Sequence of Ricatti-Bessel functions of first kind. +* riccati_yn -- [+]Sequence of Ricatti-Bessel functions of second kind. - Ricatti-Bessel Functions +Struve Functions +---------------- - riccati_jn -- **Sequence of Ricatti-Bessel functions of first kind. - riccati_yn -- **Sequence of Ricatti-Bessel functions of second kind. +* struve -- Struve function --- Hv(x) +* modstruve -- Modified struve function --- Lv(x) +* itstruve0 -- Integral of H0(t) from 0 to x +* it2struve0 -- Integral of H0(t)/t from x to Inf. +* itmodstruve0 -- Integral of L0(t) from 0 to x. - Struve Functions - struve -- Struve function --- Hv(x) - modstruve -- Modified struve function --- Lv(x) - itstruve0 -- Integral of H0(t) from 0 to x - it2struve0 -- Integral of H0(t)/t from x to Inf. - itmodstruve0 -- Integral of L0(t) from 0 to x. +Raw Statistical Functions (Friendly versions in scipy.stats) +------------------------------------------------------------ +* bdtr -- Sum of terms 0 through k of of the binomial pdf. +* bdtrc -- Sum of terms k+1 through n of the binomial pdf. +* bdtri -- Inverse of bdtr +* btdtr -- Integral from 0 to x of beta pdf. +* btdtri -- Quantiles of beta distribution +* fdtr -- Integral from 0 to x of F pdf. +* fdtrc -- Integral from x to infinity under F pdf. +* fdtri -- Inverse of fdtrc +* gdtr -- Integral from 0 to x of gamma pdf. +* gdtrc -- Integral from x to infinity under gamma pdf. +* gdtri -- Quantiles of gamma distribution +* nbdtr -- Sum of terms 0 through k of the negative binomial pdf. +* nbdtrc -- Sum of terms k+1 to infinity under negative binomial pdf. +* nbdtri -- Inverse of nbdtr +* pdtr -- Sum of terms 0 through k of the Poisson pdf. +* pdtrc -- Sum of terms k+1 to infinity of the Poisson pdf. +* pdtri -- Inverse of pdtr +* stdtr -- Integral from -infinity to t of the Student-t pdf. +* stdtri -- Inverse of stdtr (quantiles) +* chdtr -- Integral from 0 to x of the Chi-square pdf. +* chdtrc -- Integral from x to infnity of Chi-square pdf. +* chdtri -- Inverse of chdtrc. 
+* ndtr -- Integral from -infinity to x of standard normal pdf +* ndtri -- Inverse of ndtr (quantiles) +* smirnov -- Kolmogorov-Smirnov complementary CDF for one-sided test statistic (Dn+ or Dn-) +* smirnovi -- Inverse of smirnov. +* kolmogorov -- The complementary CDF of the (scaled) two-sided test statistic (Kn*) valid for large n. +* kolmogi -- Inverse of kolmogorov +* tklmbda -- Tukey-Lambda CDF - Raw Statistical Functions (Friendly versions in scipy.stats) +Gamma and Related Functions +--------------------------- - bdtr -- Sum of terms 0 through k of of the binomial pdf. - bdtrc -- Sum of terms k+1 through n of the binomial pdf. - bdtri -- Inverse of bdtr - btdtr -- Integral from 0 to x of beta pdf. - btdtri -- Quantiles of beta distribution - fdtr -- Integral from 0 to x of F pdf. - fdtrc -- Integral from x to infinity under F pdf. - fdtri -- Inverse of fdtrc - gdtr -- Integral from 0 to x of gamma pdf. - gdtrc -- Integral from x to infinity under gamma pdf. - gdtri -- Quantiles of gamma distribution - nbdtr -- Sum of terms 0 through k of the negative binomial pdf. - nbdtrc -- Sum of terms k+1 to infinity under negative binomial pdf. - nbdtri -- Inverse of nbdtr - pdtr -- Sum of terms 0 through k of the Poisson pdf. - pdtrc -- Sum of terms k+1 to infinity of the Poisson pdf. - pdtri -- Inverse of pdtr - stdtr -- Integral from -infinity to t of the Student-t pdf. - stdtri -- Inverse of stdtr (quantiles) - chdtr -- Integral from 0 to x of the Chi-square pdf. - chdtrc -- Integral from x to infnity of Chi-square pdf. - chdtri -- Inverse of chdtrc. - ndtr -- Integral from -infinity to x of standard normal pdf - ndtri -- Inverse of ndtr (quantiles) - smirnov -- Kolmogorov-Smirnov complementary CDF for one-sided - test statistic (Dn+ or Dn-) - smirnovi -- Inverse of smirnov. - kolmogorov -- The complementary CDF of the (scaled) two-sided test - statistic (Kn*) valid for large n. - kolmogi -- Inverse of kolmogorov - tklmbda -- Tukey-Lambda CDF +* gamma -- Gamma function. +* gammaln -- Log of the absolute value of the gamma function. +* gammainc -- Incomplete gamma integral. +* gammaincinv -- Inverse of gammainc. +* gammaincc -- Complemented incomplete gamma integral. +* gammainccinv -- Inverse of gammaincc. +* beta -- Beta function. +* betaln -- Log of the absolute value of the beta function. +* betainc -- Incomplete beta integral. +* betaincinv -- Inverse of betainc. +* betaincinva -- Inverse (in first argument, a) of betainc +* betaincinvb -- Inverse (in first argument, b) of betainc +* psi(digamma) -- Logarithmic derivative of the gamma function. +* rgamma -- One divided by the gamma function. +* polygamma -- Nth derivative of psi function. - Gamma and Related Functions +Error Function and Fresnel Integrals +------------------------------------ - gamma -- Gamma function. - gammaln -- Log of the absolute value of the gamma function. - gammainc -- Incomplete gamma integral. - gammaincinv -- Inverse of gammainc. - gammaincc -- Complemented incomplete gamma integral. - gammainccinv -- Inverse of gammaincc. - beta -- Beta function. - betaln -- Log of the absolute value of the beta function. - betainc -- Incomplete beta integral. - betaincinv -- Inverse of betainc. - betaincinva -- Inverse (in first argument, a) of betainc - betaincinvb -- Inverse (in first argument, b) of betainc - psi(digamma) -- Logarithmic derivative of the gamma function. - rgamma -- One divided by the gamma function. - polygamma -- Nth derivative of psi function. +* erf -- Error function. 
+* erfc -- Complemented error function (1- erf(x)) +* erfinv -- Inverse of error function +* erfcinv -- Inverse of erfc +* erf_zeros -- [+]Complex zeros of erf(z) +* fresnel -- Fresnel sine and cosine integrals. +* fresnel_zeros -- Complex zeros of both Fresnel integrals +* fresnelc_zeros -- [+]Complex zeros of fresnel cosine integrals +* fresnels_zeros -- [+]Complex zeros of fresnel sine integrals +* modfresnelp -- Modified Fresnel integrals F_+(x) and K_+(x) +* modfresnelm -- Modified Fresnel integrals F_-(x) and K_-(x) - Error Function and Fresnel Integrals +Legendre Functions +------------------ - erf -- Error function. - erfc -- Complemented error function (1- erf(x)) - erfinv -- Inverse of error function - erfcinv -- Inverse of erfc - erf_zeros -- **Complex zeros of erf(z) - fresnel -- Fresnel sine and cosine integrals. - fresnel_zeros -- Complex zeros of both Fresnel integrals - fresnelc_zeros -- **Complex zeros of fresnel cosine integrals - fresnels_zeros -- **Complex zeros of fresnel sine integrals - modfresnelp -- Modified Fresnel integrals F_+(x) and K_+(x) - modfresnelm -- Modified Fresnel integrals F_-(x) and K_-(x) +* lpn -- [+]Legendre Functions (polynomials) of the first kind +* lqn -- [+]Legendre Functions of the second kind. +* lpmn -- [+]Associated Legendre Function of the first kind. +* lqmn -- [+]Associated Legendre Function of the second kind. +* lpmv -- Associated Legendre Function of arbitrary non-negative degree v. +* sph_harm -- Spherical Harmonics (complex-valued) Y^m_n(theta,phi) - Legendre Functions +Orthogonal polynomials --- 15 types + These functions all return a polynomial class which can then be + evaluated: vals = chebyt(n)(x) + This class also has an attribute 'weights' which + return the roots, weights, and total weights for the appropriate + form of Gaussian quadrature. These are returned in an n x 3 array with roots + in the first column, weights in the second column, and total weights in the final + column - lpn -- **Legendre Functions (polynomials) of the first kind - lqn -- **Legendre Functions of the second kind. - lpmn -- **Associated Legendre Function of the first kind. - lqmn -- **Associated Legendre Function of the second kind. - lpmv -- Associated Legendre Function of arbitrary non-negative - degree v. - sph_harm -- Spherical Harmonics (complex-valued) Y^m_n(theta,phi) +* legendre -- [+]Legendre polynomial P_n(x) (lpn -- for function). +* chebyt -- [+]Chebyshev polynomial T_n(x) +* chebyu -- [+]Chebyshev polynomial U_n(x) +* chebyc -- [+]Chebyshev polynomial C_n(x) +* chebys -- [+]Chebyshev polynomial S_n(x) +* jacobi -- [+]Jacobi polynomial P^(alpha,beta)_n(x) +* laguerre -- [+]Laguerre polynomial, L_n(x) +* genlaguerre -- [+]Generalized (Associated) Laguerre polynomial, L^alpha_n(x) +* hermite -- [+]Hermite polynomial H_n(x) +* hermitenorm -- [+]Normalized Hermite polynomial, He_n(x) +* gegenbauer -- [+]Gegenbauer (Ultraspherical) polynomials, C^(alpha)_n(x) +* sh_legendre -- [+]shifted Legendre polynomial, P*_n(x) +* sh_chebyt -- [+]shifted Chebyshev polynomial, T*_n(x) +* sh_chebyu -- [+]shifted Chebyshev polynomial, U*_n(x) +* sh_jacobi -- [+]shifted Jacobi polynomial, J*_n(x) = G^(p,q)_n(x) - Orthogonal polynomials --- 15 types - ** These functions all return a polynomial class which can then be - evaluated: vals = chebyt(n)(x) - This class also has an attribute 'weights' which - return the roots, weights, and total weights for the appropriate - form of Gaussian quadrature. 
These are returned in an n x 3 array with roots - in the first column, weights in the second column, and total weights in the final - column +HyperGeometric Functions +------------------------ - legendre -- **Legendre polynomial P_n(x) (lpn -- for function). - chebyt -- **Chebyshev polynomial T_n(x) - chebyu -- **Chebyshev polynomial U_n(x) - chebyc -- **Chebyshev polynomial C_n(x) - chebys -- **Chebyshev polynomial S_n(x) - jacobi -- **Jacobi polynomial P^(alpha,beta)_n(x) - laguerre -- **Laguerre polynomial, L_n(x) - genlaguerre -- **Generalized (Associated) Laguerre polynomial, L^alpha_n(x) - hermite -- **Hermite polynomial H_n(x) - hermitenorm -- **Normalized Hermite polynomial, He_n(x) - gegenbauer -- **Gegenbauer (Ultraspherical) polynomials, C^(alpha)_n(x) - sh_legendre -- **shifted Legendre polynomial, P*_n(x) - sh_chebyt -- **shifted Chebyshev polynomial, T*_n(x) - sh_chebyu -- **shifted Chebyshev polynomial, U*_n(x) - sh_jacobi -- **shifted Jacobi polynomial, J*_n(x) = G^(p,q)_n(x) +* hyp2f1 -- Gauss hypergeometric function (2F1) +* hyp1f1 -- Confluent hypergeometric function (1F1) +* hyperu -- Confluent hypergeometric function (U) +* hyp0f1 -- Confluent hypergeometric limit function (0F1) +* hyp2f0 -- Hypergeometric function (2F0) +* hyp1f2 -- Hypergeometric function (1F2) +* hyp3f0 -- Hypergeometric function (3F0) - HyperGeometric Functions +Parabolic Cylinder Functions +---------------------------- - hyp2f1 -- Gauss hypergeometric function (2F1) - hyp1f1 -- Confluent hypergeometric function (1F1) - hyperu -- Confluent hypergeometric function (U) - hyp0f1 -- Confluent hypergeometric limit function (0F1) - hyp2f0 -- Hypergeometric function (2F0) - hyp1f2 -- Hypergeometric function (1F2) - hyp3f0 -- Hypergeometric function (3F0) +* pbdv -- Parabolic cylinder function Dv(x) and derivative. +* pbvv -- Parabolic cylinder function Vv(x) and derivative. +* pbwa -- Parabolic cylinder function W(a,x) and derivative. +* pbdv_seq -- [+]Sequence of parabolic cylinder functions Dv(x) +* pbvv_seq -- [+]Sequence of parabolic cylinder functions Vv(x) +* pbdn_seq -- [+]Sequence of parabolic cylinder functions Dn(z), complex z - Parabolic Cylinder Functions +mathieu and Related Functions (and derivatives) +----------------------------------------------- - pbdv -- Parabolic cylinder function Dv(x) and derivative. - pbvv -- Parabolic cylinder function Vv(x) and derivative. - pbwa -- Parabolic cylinder function W(a,x) and derivative. 
- pbdv_seq -- **Sequence of parabolic cylinder functions Dv(x) - pbvv_seq -- **Sequence of parabolic cylinder functions Vv(x) - pbdn_seq -- **Sequence of parabolic cylinder functions Dn(z), complex z +* mathieu_a -- Characteristic values for even solution (ce_m) +* mathieu_b -- Characteristic values for odd solution (se_m) +* mathieu_even_coef -- [+]sequence of expansion coefficients for even solution +* mathieu_odd_coef -- [+]sequence of expansion coefficients for odd solution - mathieu and Related Functions (and derivatives) +**All the following return both function and first derivative** - mathieu_a -- Characteristic values for even solution (ce_m) - mathieu_b -- Characteristic values for odd solution (se_m) - mathieu_even_coef -- **sequence of expansion coefficients for even solution - mathieu_odd_coef -- **sequence of expansion coefficients for odd solution - ** All the following return both function and first derivative ** - mathieu_cem -- Even mathieu function - mathieu_sem -- Odd mathieu function - mathieu_modcem1 -- Even modified mathieu function of the first kind - mathieu_modcem2 -- Even modified mathieu function of the second kind - mathieu_modsem1 -- Odd modified mathieu function of the first kind - mathieu_modsem2 -- Odd modified mathieu function of the second kind +* mathieu_cem -- Even mathieu function +* mathieu_sem -- Odd mathieu function +* mathieu_modcem1 -- Even modified mathieu function of the first kind +* mathieu_modcem2 -- Even modified mathieu function of the second kind +* mathieu_modsem1 -- Odd modified mathieu function of the first kind +* mathieu_modsem2 -- Odd modified mathieu function of the second kind - Spheroidal Wave Functions +Spheroidal Wave Functions +------------------------- - pro_ang1 -- Prolate spheroidal angular function of the first kind - pro_rad1 -- Prolate spheroidal radial function of the first kind - pro_rad2 -- Prolate spheroidal radial function of the second kind - obl_ang1 -- Oblate spheroidal angular function of the first kind - obl_rad1 -- Oblate spheroidal radial function of the first kind - obl_rad2 -- Oblate spheroidal radial function of the second kind - pro_cv -- Compute characteristic value for prolate functions - obl_cv -- Compute characteristic value for oblate functions - pro_cv_seq -- Compute sequence of prolate characteristic values - obl_cv_seq -- Compute sequence of oblate characteristic values - ** The following functions require pre-computed characteristic values ** - pro_ang1_cv -- Prolate spheroidal angular function of the first kind - pro_rad1_cv -- Prolate spheroidal radial function of the first kind - pro_rad2_cv -- Prolate spheroidal radial function of the second kind - obl_ang1_cv -- Oblate spheroidal angular function of the first kind - obl_rad1_cv -- Oblate spheroidal radial function of the first kind - obl_rad2_cv -- Oblate spheroidal radial function of the second kind +* pro_ang1 -- Prolate spheroidal angular function of the first kind +* pro_rad1 -- Prolate spheroidal radial function of the first kind +* pro_rad2 -- Prolate spheroidal radial function of the second kind +* obl_ang1 -- Oblate spheroidal angular function of the first kind +* obl_rad1 -- Oblate spheroidal radial function of the first kind +* obl_rad2 -- Oblate spheroidal radial function of the second kind +* pro_cv -- Compute characteristic value for prolate functions +* obl_cv -- Compute characteristic value for oblate functions +* pro_cv_seq -- Compute sequence of prolate characteristic values +* obl_cv_seq -- Compute sequence of oblate 
characteristic values - Kelvin Functions +**The following functions require pre-computed characteristic values** - kelvin -- All Kelvin functions (order 0) and derivatives. - kelvin_zeros -- **Zeros of All Kelvin functions (order 0) and derivatives - ber -- Kelvin function ber x - bei -- Kelvin function bei x - berp -- Derivative of Kelvin function ber x - beip -- Derivative of Kelvin function bei x - ker -- Kelvin function ker x - kei -- Kelvin function kei x - kerp -- Derivative of Kelvin function ker x - keip -- Derivative of Kelvin function kei x - ber_zeros -- **Zeros of Kelvin function ber x - bei_zeros -- **Zeros of Kelvin function bei x - berp_zeros -- **Zeros of derivative of Kelvin function ber x - beip_zeros -- **Zeros of derivative of Kelvin function bei x - ker_zeros -- **Zeros of Kelvin function ker x - kei_zeros -- **Zeros of Kelvin function kei x - kerp_zeros -- **Zeros of derivative of Kelvin function ker x - keip_zeros -- **Zeros of derivative of Kelvin function kei x +* pro_ang1_cv -- Prolate spheroidal angular function of the first kind +* pro_rad1_cv -- Prolate spheroidal radial function of the first kind +* pro_rad2_cv -- Prolate spheroidal radial function of the second kind +* obl_ang1_cv -- Oblate spheroidal angular function of the first kind +* obl_rad1_cv -- Oblate spheroidal radial function of the first kind +* obl_rad2_cv -- Oblate spheroidal radial function of the second kind - Other Special Functions +Kelvin Functions +---------------- - expn -- Exponential integral. - exp1 -- Exponential integral of order 1 (for complex argument) - expi -- Another exponential integral -- Ei(x) - wofz -- Fadeeva function. - dawsn -- Dawson's integral. - shichi -- Hyperbolic sine and cosine integrals. - sici -- Integral of the sinc and "cosinc" functions. - spence -- Dilogarithm integral. - zeta -- Riemann zeta function of two arguments. - zetac -- 1.0 - standard Riemann zeta function. +* kelvin -- All Kelvin functions (order 0) and derivatives. +* kelvin_zeros -- [+]Zeros of All Kelvin functions (order 0) and derivatives +* ber -- Kelvin function ber x +* bei -- Kelvin function bei x +* berp -- Derivative of Kelvin function ber x +* beip -- Derivative of Kelvin function bei x +* ker -- Kelvin function ker x +* kei -- Kelvin function kei x +* kerp -- Derivative of Kelvin function ker x +* keip -- Derivative of Kelvin function kei x +* ber_zeros -- [+]Zeros of Kelvin function ber x +* bei_zeros -- [+]Zeros of Kelvin function bei x +* berp_zeros -- [+]Zeros of derivative of Kelvin function ber x +* beip_zeros -- [+]Zeros of derivative of Kelvin function bei x +* ker_zeros -- [+]Zeros of Kelvin function ker x +* kei_zeros -- [+]Zeros of Kelvin function kei x +* kerp_zeros -- [+]Zeros of derivative of Kelvin function ker x +* keip_zeros -- [+]Zeros of derivative of Kelvin function kei x - Convenience Functions +Other Special Functions +----------------------- - cbrt -- Cube root. - exp10 -- 10 raised to the x power. - exp2 -- 2 raised to the x power. - radian -- radian angle given degrees, minutes, and seconds. - cosdg -- cosine of the angle given in degrees. - sindg -- sine of the angle given in degrees. - tandg -- tangent of the angle given in degrees. - cotdg -- cotangent of the angle given in degrees. - log1p -- log(1+x) - expm1 -- exp(x)-1 - cosm1 -- cos(x)-1 - round -- round the argument to the nearest integer. If argument - ends in 0.5 exactly, pick the nearest even integer. +* expn -- Exponential integral. 
+* exp1 -- Exponential integral of order 1 (for complex argument) +* expi -- Another exponential integral -- Ei(x) +* wofz -- Fadeeva function. +* dawsn -- Dawson's integral. +* shichi -- Hyperbolic sine and cosine integrals. +* sici -- Integral of the sinc and "cosinc" functions. +* spence -- Dilogarithm integral. +* zeta -- Riemann zeta function of two arguments. +* zetac -- 1.0 - standard Riemann zeta function. - ** in the description indicates a function which is not a universal - function and does not follow broadcasting and automatic - array-looping rules. +Convenience Functions +--------------------- - Error handling: +* cbrt -- Cube root. +* exp10 -- 10 raised to the x power. +* exp2 -- 2 raised to the x power. +* radian -- radian angle given degrees, minutes, and seconds. +* cosdg -- cosine of the angle given in degrees. +* sindg -- sine of the angle given in degrees. +* tandg -- tangent of the angle given in degrees. +* cotdg -- cotangent of the angle given in degrees. +* log1p -- log(1+x) +* expm1 -- exp(x)-1 +* cosm1 -- cos(x)-1 +* round -- round the argument to the nearest integer. If argument ends in 0.5 exactly, pick the nearest even integer. +------- + +[+] in the description indicates a function which is not a universal +function and does not follow broadcasting and automatic +array-looping rules. + + +Error handling +-------------- + Errors are handled by returning nans, or other appropriate values. Some of the special function routines will print an error message when an error occurs. By default this printing is disabled. To enable such messages use errprint(1) To disable such messages use errprint(0). - Example: - >>> print scipy.special.bdtr(-1,10,0.3) - >>> scipy.special.errprint(1) - >>> print scipy.special.bdtr(-1,10,0.3) - + Example: + >>> print scipy.special.bdtr(-1,10,0.3) + >>> scipy.special.errprint(1) + >>> print scipy.special.bdtr(-1,10,0.3) """ +__docformat__ = 'restructuredtext' postpone_import = 1 From scipy-svn at scipy.org Wed May 30 05:50:01 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 30 May 2007 04:50:01 -0500 (CDT) Subject: [Scipy-svn] r3059 - trunk/Lib/interpolate Message-ID: <20070530095001.DD71139C0F9@new.scipy.org> Author: oliphant Date: 2007-05-30 04:49:58 -0500 (Wed, 30 May 2007) New Revision: 3059 Modified: trunk/Lib/interpolate/interpolate.py Log: Set-up interpolate structure to support more than just smoothest condition. Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-29 17:46:03 UTC (rev 3058) +++ trunk/Lib/interpolate/interpolate.py 2007-05-30 09:49:58 UTC (rev 3059) @@ -1,3 +1,5 @@ + + """ Classes for interpolating values. 
""" @@ -387,13 +389,16 @@ fromspline = classmethod(fromspline) -def _find_smoothest(xk, yk, order): +def _find_smoothest(xk, yk, order, conds=None, B=None): # construct Bmatrix, and Jmatrix # e = J*c # minimize norm(e,2) given B*c=yk + # if desired B can be given + # conds is ignored N = len(xk)-1 K = order - B = _fitpack._bsplmat(order, xk) + if B is None: + B = _fitpack._bsplmat(order, xk) J = _fitpack._bspldismat(order, xk) u,s,vh = np.dual.svd(B) ind = K-1 @@ -411,7 +416,6 @@ return dot(tmp, yk) - def _setdiag(a, k, v): assert (a.ndim==2) M,N = a.shape @@ -459,39 +463,8 @@ val = dot(V2,dot(A,V2)) res1 = dot(np.outer(V2,V2)/val,A) mk = dot(np.eye(Np1)-res1,dot(Bd,b)) - return mk + return mk -def _calc_fromJBd(J, Bd, b, V2, NN): - A = dot(J.T,J) - sub = dot(V2.T,dot(A,V2)) - subi = np.linalg.inv(sub) - res0 = dot(V2,subi) - res1 = dot(res0,dot(V2.T,A)) - mk = dot(np.eye(NN)-res1,dot(Bd,b)) - return mk - -def _find_smoothest3(xk, yk): - N = len(xk)-1 - Np1 = N+1 - Nm1 = N-1 - dk = np.diff(xk) - # find B and then take pseudo-inverse - B = np.zeros((Nm1,Np1)) - _setdiag(B,0,dk[:-1]) - _setdiag(B,1,2*(dk[1:]+dk[:-1])) - _setdiag(B,2,dk[1:]) - u,s,vh = np.dual.svd(B) - V2 = vh[-2:,:].T - Bd = dot(vh[:-2,:].T, dot(np.diag(1.0/s),u.T)) - b0 = np.diff(yk)/dk - b = 6*np.diff(b0) - J = np.zeros((N-1,N+1)) - idk = 1.0/dk - _setdiag(J,0,idk[:-1]) - _setdiag(J,1,-idk[1:]-idk[:-1]) - _setdiag(J,2,idk[1:]) - return _calc_fromJBd(J, Bd, b, V2, Np1) - def _get_spline2_Bb(xk, yk, kind, conds): Np1 = len(xk) dk = xk[1:]-xk[:-1] @@ -619,7 +592,61 @@ else: raise ValueError, "%s not supported" % kind +# conds is a tuple of an array and a vector +# giving the left-hand and the right-hand side +# of the additional equations to add to B +def _find_user(xk, yk, order, conds, B): + lh = conds[0] + rh = conds[1] + B = concatenate((B,lh),axis=0) + w = concatenate((yk,rh),axis=0) + M,N = B.shape + if (M>N): + raise ValueError("over-specification of conditions") + elif (M 1) and/or xnew is + N-d, then the result is xnew.shape + cvals.shape[1:] providing the + interpolation of multiple curves. """ oldshape = np.shape(xnew) xx = np.ravel(xnew) @@ -696,7 +705,6 @@ res.shape = oldshape + sh return res - def spltopp(xk,cvals,k): """Return a piece-wise polynomial object from a fixed-spline tuple. 
""" From scipy-svn at scipy.org Wed May 30 15:12:51 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 30 May 2007 14:12:51 -0500 (CDT) Subject: [Scipy-svn] r3060 - trunk/Lib/interpolate Message-ID: <20070530191251.9345A39C030@new.scipy.org> Author: oliphant Date: 2007-05-30 14:12:49 -0500 (Wed, 30 May 2007) New Revision: 3060 Modified: trunk/Lib/interpolate/interpolate.py Log: Remove spurious : in interpolate.py Modified: trunk/Lib/interpolate/interpolate.py =================================================================== --- trunk/Lib/interpolate/interpolate.py 2007-05-30 09:49:58 UTC (rev 3059) +++ trunk/Lib/interpolate/interpolate.py 2007-05-30 19:12:49 UTC (rev 3060) @@ -612,39 +612,39 @@ # at K-1 farthest separated points in the interval def _find_not_a_knot(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) # If conds is None, then ensure zero-valued second # derivative at K-1 farthest separated points def _find_natural(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) # If conds is None, then ensure zero-valued first # derivative at K-1 farthest separated points def _find_clamped(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) def _find_fixed(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) # If conds is None, then use coefficient periodicity # If conds is 'function' then use function periodicity def _find_periodic(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) # Doesn't use conds def _find_symmetric(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) # conds is a dictionary with multiple values def _find_mixed(xk, yk, order, conds, B): raise NotImplementedError - return _find_user(xk, yk, order, conds, B): + return _find_user(xk, yk, order, conds, B) def splmake(xk,yk,order=3,kind='smoothest',conds=None): From scipy-svn at scipy.org Wed May 30 19:31:30 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 30 May 2007 18:31:30 -0500 (CDT) Subject: [Scipy-svn] r3061 - trunk/Lib/sandbox/arpack/ARPACK/UTIL Message-ID: <20070530233130.484BB39C133@new.scipy.org> Author: hagberg Date: 2007-05-30 18:31:13 -0500 (Wed, 30 May 2007) New Revision: 3061 Modified: trunk/Lib/sandbox/arpack/ARPACK/UTIL/second.f Log: Remove reference to "external etime" since apparently etime is not considered an external function anymore. Modified: trunk/Lib/sandbox/arpack/ARPACK/UTIL/second.f =================================================================== --- trunk/Lib/sandbox/arpack/ARPACK/UTIL/second.f 2007-05-30 19:12:49 UTC (rev 3060) +++ trunk/Lib/sandbox/arpack/ARPACK/UTIL/second.f 2007-05-30 23:31:13 UTC (rev 3061) @@ -21,7 +21,7 @@ * .. * .. External Functions .. REAL ETIME - EXTERNAL ETIME +* EXTERNAL ETIME * .. * .. Executable Statements .. 
* From scipy-svn at scipy.org Wed May 30 19:59:41 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 30 May 2007 18:59:41 -0500 (CDT) Subject: [Scipy-svn] r3062 - trunk/Lib/sandbox/arpack/tests Message-ID: <20070530235941.CDA8D39C019@new.scipy.org> Author: hagberg Date: 2007-05-30 18:59:23 -0500 (Wed, 30 May 2007) New Revision: 3062 Modified: trunk/Lib/sandbox/arpack/tests/test_arpack.py Log: Modify complex symmetric single precision test of eigenvectors to only compare to 5 decimal places. Probably a reasonable estimate for an error in this case. Addresses ticket #367. Modified: trunk/Lib/sandbox/arpack/tests/test_arpack.py =================================================================== --- trunk/Lib/sandbox/arpack/tests/test_arpack.py 2007-05-30 23:31:13 UTC (rev 3061) +++ trunk/Lib/sandbox/arpack/tests/test_arpack.py 2007-05-30 23:59:23 UTC (rev 3062) @@ -307,7 +307,7 @@ a,aw = self.get_a1(typ) w,v = eigen(a,k,which='LM') for i in range(k): - assert_array_almost_equal(sb.dot(a,v[:,i]),w[i]*v[:,i]) + assert_array_almost_equal(sb.dot(a,v[:,i]),w[i]*v[:,i],decimal=5) aw.real.sort() w.real.sort() assert_array_almost_equal(w,aw[-k:]) From scipy-svn at scipy.org Wed May 30 20:18:42 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Wed, 30 May 2007 19:18:42 -0500 (CDT) Subject: [Scipy-svn] r3063 - trunk/Lib/sandbox/arpack Message-ID: <20070531001842.D2FA339C0A6@new.scipy.org> Author: hagberg Date: 2007-05-30 19:18:38 -0500 (Wed, 30 May 2007) New Revision: 3063 Modified: trunk/Lib/sandbox/arpack/arpack.py Log: Remove unneeded asanyarray() and catch error if input shape is not 2d and square. Fixes #366. Modified: trunk/Lib/sandbox/arpack/arpack.py =================================================================== --- trunk/Lib/sandbox/arpack/arpack.py 2007-05-30 23:59:23 UTC (rev 3062) +++ trunk/Lib/sandbox/arpack/arpack.py 2007-05-31 00:18:38 UTC (rev 3063) @@ -98,9 +98,8 @@ return_eigenvectors -- True|False, return eigenvectors """ - A=sb.asanyarray(A) - n,ny=A.shape try: + n,ny=A.shape n==ny except: raise AttributeError("matrix is not square") @@ -283,9 +282,8 @@ return_eigenvectors -- True|False, return eigenvectors """ - A=sb.asanyarray(A) - n,ny=A.shape try: + n,ny=A.shape n==ny except: raise AttributeError("matrix is not square") From scipy-svn at scipy.org Thu May 31 11:25:28 2007 From: scipy-svn at scipy.org (scipy-svn at scipy.org) Date: Thu, 31 May 2007 10:25:28 -0500 (CDT) Subject: [Scipy-svn] r3065 - trunk Message-ID: <20070531152528.611D439C150@new.scipy.org> Author: cookedm Date: 2007-05-31 10:25:26 -0500 (Thu, 31 May 2007) New Revision: 3065 Added: trunk/setupegg.py Log: Add setupegg.py, for easy setuptools use. Added: trunk/setupegg.py =================================================================== --- trunk/setupegg.py 2007-05-31 11:34:12 UTC (rev 3064) +++ trunk/setupegg.py 2007-05-31 15:25:26 UTC (rev 3065) @@ -0,0 +1,7 @@ +#!/usr/bin/env python +""" +A setup.py script to use setuptools, which gives egg goodness, etc. +""" + +from setuptools import setup +execfile('setup.py') Property changes on: trunk/setupegg.py ___________________________________________________________________ Name: svn:executable + *
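As a usage note on the setupegg.py shim added in r3065: importing setuptools for its side effects (it patches distutils) before executing the project's ordinary setup.py makes setuptools-only commands available without duplicating any build configuration. Below is a minimal annotated sketch of that shim plus typical invocations; it assumes Python 2 with setuptools installed and a setup.py in the same directory, and the command names (bdist_egg, develop) come from setuptools itself rather than from this commit.

    #!/usr/bin/env python
    # Sketch of the setupegg.py shim: importing setuptools first patches
    # distutils, so the regular setup.py picks up setuptools behaviour
    # when it is executed below (Python 2 idiom, matching r3065).
    from setuptools import setup   # imported for its side effects
    execfile('setup.py')

    # Typical usage once the shim exists (standard setuptools commands):
    #   python setupegg.py bdist_egg   # build an .egg distribution
    #   python setupegg.py develop     # install in development mode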