From b0fd1de4bc7cdd44c2e4478bdcb352a7b1612a64 Mon Sep 17 00:00:00 2001 From: Michael Hearne Date: Tue, 24 Oct 2017 10:04:03 -0600 Subject: [PATCH 1/3] Bunch of changes --- .gitignore | 3 ++- .travis.yml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 94cef64..2bd196e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,4 +6,5 @@ build .cache .coverage .coverage.xml -earthquake_impact_utils.egg-info/ +htmlcov +earthquake_impact_utils.egg-info/ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 93a5d40..8f681f9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -28,7 +28,7 @@ before_script: script: - export PYTHONPATH="." - echo `which py.test` - - py.test --mpl --cov=impactutils + - py.test --cov=impactutils after_success: - pip install codecov codacy-coverage - codecov From e6e3e277aa2fe4df55addaf843384b64369ed35d Mon Sep 17 00:00:00 2001 From: Michael Hearne Date: Tue, 24 Oct 2017 10:33:29 -0600 Subject: [PATCH 2/3] hdf container class added, still testing. 
--- impactutils/io/container.py | 564 ++++++++++++++++++++++++++++++++++++ install.sh | 49 ++++ test/io/container_test.py | 237 +++++++++++++++ 3 files changed, 850 insertions(+) create mode 100644 impactutils/io/container.py create mode 100755 install.sh create mode 100755 test/io/container_test.py diff --git a/impactutils/io/container.py b/impactutils/io/container.py new file mode 100644 index 0000000..0d9c93b --- /dev/null +++ b/impactutils/io/container.py @@ -0,0 +1,564 @@ +#!/usr/bin/env python + +# stdlib imports +from datetime import datetime +import collections +import copy + +# third party imports +import h5py +import numpy as np +import pandas as pd + +#local imports +from impactutils.time.ancient_time import HistoricTime + +# list of allowed data types in dictionaries +ALLOWED = [str, int, float, bool, bytes, + type(None), + list, tuple, np.ndarray, + np.float64, np.bool_, np.int64, + dict, datetime, pd.Timestamp, + collections.OrderedDict] + +TIMEFMT = '%Y-%m-%d %H:%M:%S.%f' + +class HDFContainer(object): + def __init__(self,hdfobj): + """ + Instantiate an HDFContainer from an open h5py File Object. + + Args: + hdfobj: Open h5py File Object. + """ + self._hdfobj = hdfobj + + + @classmethod + def create(cls,hdf_file): + """ + Create empty container in input hdf_file. + + Args: + hdf_file: Path to HDF file to be created. + + Returns: + Instance of + """ + hdfobj = h5py.File(hdf_file, "w") + return cls(hdfobj) + + @classmethod + def load(cls, hdf_file): + """ + Instantiate an HDFContainer from an HDF5 file. + + Args: + hdf_file: Valid path to HDF5 file. + + Returns: + Instance of HDFContainer. + """ + hdfobj = h5py.File(hdf_file, "r+") + # probably should do some validating to make sure relevant data exists + return cls(hdfobj) + + def close(self): + """ + Close the HDF file. + """ + self._hdfobj.close() + + def getFileName(self): + """ + Return the name of the HDF5 file associated with this object.. 
+ + Returns: + (str): Name of the file associated with this object. + """ + return self._hdfobj.filename + + #########Dictionaries####################################### + def getDictionary(self,name): + """Return a dictionary stored in container. + + Args: + name (str) String name of HDF group under which dictionary is stored. + + Returns: + (dict) Dictionary that was stored in input named group. + """ + group_name = '__dictionary_%s__' % name + if group_name not in self._hdfobj: + raise LookupError('Dictionary %s not in %s' % (name,self.getFileName())) + mgroup = self._hdfobj[group_name] + dict = _h5group2dict(mgroup) + return dict + + def setDictionary(self,name,dictionary): + """ + Store a dictionary in the HDF file, in group name. + + Args: + name (str) String name of HDF group under which dictionary will be stored. + dictionary (dict) Dictionary containing any of the following combinations + of elements: + - str, int, float, bool, bytes, type(None), + list, tuple, np.ndarray, + np.float64, np.bool_, np.int64, + dict, datetime, pd.Timestamp, + xcollections.OrderedDict + Returns: + (Group) HDF5 Group object. + """ + indict = copy.deepcopy(dictionary) + group_name = '__dictionary_%s__' % name + mgroup = self._hdfobj.create_group(group_name) + _dict2h5group(indict, mgroup) + return mgroup + + def dropDictionary(self,name): + """ + Delete dictionary from container. + + Args: + name (str): + The name of the dictionary to be deleted. + + """ + _drop_item(self._hdfobj,name,'dictionary') + + def getDictionaries(self): + """ + Return list of names of dictionaries stored in container. + + Returns: + (list) List of names of dictionaries stored in container. + """ + + dictionaries = _get_type_list(self._hdfobj,'dictionary') + return dictionaries + #########Dictionaries####################################### + + #########Lists####################################### + def setList(self,name,inlist): + """ + Store a homogenous list in the HDF file. 
+ + Args: + name (str) String name of HDF group under which list will be stored. + inlist (list) List containing any of the following data types: + - str, int, float, bool, bytes, type(None), + list, tuple, np.ndarray, + np.float64, np.bool_, np.int64, + dict, datetime, pd.Timestamp, + collections.OrderedDict + Returns: + (Group) HDF5 Group object. + """ + if isinstance(inlist[0],dict): + raise TypeError('lists with dictionaries are not supported.') + dtype = type(inlist[0]) + for element in inlist[1:]: + if type(element) != dtype: + raise TypeError('Heterogeneous lists are not supported.') + + newlist = _encode_list(inlist[:]) #encode a copy of the list + group_name = '__list_%s__' % name + mgroup = self._hdfobj.create_group(group_name) + mgroup.attrs['list'] = _encode_list(newlist) + return mgroup + + def getList(self,name): + """Return a list stored in container. + + Args: + name (str) String name of HDF group under which list is stored. + + Returns: + (list) List that was stored in input named group. + """ + group_name = '__list_%s__' % name + if group_name not in self._hdfobj: + raise LookupError('List %s not in %s' % (name,self.getFileName())) + mgroup = self._hdfobj[group_name] + outlist = _decode_list(mgroup.attrs['list']) + return outlist + + def getLists(self): + """ + Return list of names of lists stored in container. + + Returns: + (list) List of names of lists stored in container. + """ + lists = _get_type_list(self._hdfobj,'list') + return lists + + def dropList(self,name): + """ + Delete list from container. + + Args: + name (str): + The name of the list to be deleted. + + """ + _drop_item(self._hdfobj,name,'list') + + #########Lists####################################### + + #########Arrays####################################### + + def setArray(self,name,array,metadata=None): + """ + Store a numpy array and optional metadata in the HDF file, in group name. + + Args: + name (str) String name of HDF group under which list will be stored. 
+ array (np.ndarray) Numpy array. + metadata (dict) Dictionary containing any of the following data types: + - str, int, float, bool, bytes, type(None), + list, tuple, np.ndarray, + np.float64, np.bool_, np.int64, + dict, datetime, pd.Timestamp, + collections.OrderedDict + Returns: + (Dataset) HDF5 Dataset object. + """ + array_name = '__array_%s__' % name + dset = self._hdfobj.create_dataset(array_name, data=array) + if metadata: + for key, value in metadata.items(): + dset.attrs[key] = value + return dset + + def getArray(self,name): + """ + Retrieve an array of data and any associated metadata from a dataset. + + Args: + name (str): + The name of the dataset holding the data and metadata. + + Returns: + (tuple) An array of data, and a dictionary of metadata. + """ + + array_name = '__array_%s__' % name + if array_name not in self._hdfobj: + raise LookupError('Array %s not in %s' % (name,self.getFileName())) + dset = self._hdfobj[array_name] + data = dset[()] + metadata = {} + for key, value in dset.attrs.items(): + metadata[key] = value + return data, metadata + + def getArrays(self): + """ + Return list of names of arrays stored in container. + + Returns: + (list) List of names of arrays stored in container. + """ + arrays = _get_type_list(self._hdfobj,'array') + return arrays + + def dropArray(self,name): + """ + Delete array from container. + + Args: + name (str): + The name of the array to be deleted. + + """ + _drop_item(self._hdfobj,name,'array') + + #########Arrays####################################### + + #########Strings####################################### + + def setString(self,name,instring): + """ + Store a string in the HDF file, as the attribute name under a special group. + + Args: + name (str) String name of group attribute under which string will be stored. + instring (str) Python string. + + Returns: + (Group) HDF5 Group object. + """ + + #Create a special group to hold all of these strings as attributes. 
+ group_name = '__string_%s__' % name + mgroup = self._hdfobj.create_group(group_name) + + mgroup.attrs['string'] = instring + return mgroup + + def getString(self,name): + """ + Retrieve a string from a attribute name in a special group. + + Args: + name (str): The name of the attribute containing the string. + + Returns: + (str) A Python string object. + """ + group_name = '__string_%s__' % name + if group_name not in self._hdfobj: + raise LookupError('String %s not in %s' % (name,self.getFileName())) + outstring = self._hdfobj[group_name].attrs['string'] + return outstring + + def getStrings(self): + """ + Return list of names of strings stored in container. + + Returns: + (list) List of names of strings stored in container. + """ + strings = _get_type_list(self._hdfobj,'string') + return strings + + def dropString(self,name): + """ + Delete string from container. + + Args: + name (str): + The name of the string to be deleted. + + """ + _drop_item(self._hdfobj,name,'string') + + #########Strings####################################### + + #########Dataframes####################################### + def setDataFrame(self,name,dataframe): + """ + Store a pandas DataFrame in the HDF file, as a dictionary object. + + Args: + name (str) String name of group under which DataFrame will be stored. + dataframe (pd.DataFrame) pandas DataFrame. + + Returns: + (Group) HDF5 Group object. + """ + framedict = dataframe.to_dict('list') + for cname,column in framedict.items(): + if isinstance(column[0],pd.Timestamp): + column = [c.to_pydatetime() for c in column] + framedict[cname] = column + group_name = '__dataframe_%s__' % name + mgroup = self._hdfobj.create_group(group_name) + _dict2h5group(framedict, mgroup) + return mgroup + + def getDataFrame(self,name): + """Return a DataFrame stored in container. + + Args: + name (str) String name of HDF group under which DataFrame is stored. + + Returns: + (dict) DataFrame that was stored in input named group. 
+ """ + group_name = '__dataframe_%s__' % name + if group_name not in self._hdfobj: + raise LookupError('DataFrame %s not in %s' % (name,self.getFileName())) + mgroup = self._hdfobj[group_name] + datadict = _h5group2dict(mgroup) + for key,value in datadict.items(): + try: + HistoricTime.strptime(value[0],TIMEFMT) + value = [HistoricTime.strptime(v,TIMEFMT) for v in value] + datadict[key] = value + except: + pass + dataframe = pd.DataFrame(datadict) + return dataframe + + def getDataFrames(self): + """ + Return list of names of DataFrames stored in container. + + Returns: + (list) List of names of dictionaries stored in container. + """ + dataframes = _get_type_list(self._hdfobj,'dataframe') + return dataframes + + def dropDataFrame(self,name): + """ + Delete dataframe from container. + + Args: + name (str): + The name of the dataframe to be deleted. + + """ + _drop_item(self._hdfobj,name,'dataframe') + + #########Dataframes####################################### + +def _h5group2dict(group): + """ + Recursively create dictionaries from groups in an HDF file. + + Args: + group: + HDF5 group object. + + Returns: + Dictionary of metadata (possibly containing other dictionaries). + """ + tdict = {} + for (key, value) in group.attrs.items(): # attrs are NOT subgroups + + if isinstance(value,bytes): + value = value.decode('utf8') + try: + value = HistoricTime.strptime(value,TIMEFMT) + except ValueError: + pass + elif isinstance(value,str): + try: + value = HistoricTime.strptime(value,TIMEFMT) + except ValueError: + pass + tdict[key] = value + + for (key, value) in group.items(): # these are going to be the subgroups + tdict[key] = _h5group2dict(value) + return _convert(tdict) + +def _dict2h5group(mydict, group): + """ + Recursively save dictionaries into groups in an HDF group.. + + Args: + mydict (dict): + Dictionary of values to save in group or dataset. Dictionary + can contain objects of the following types: str, unicode, int, + float, long, list, tuple, np. 
# NOTE(review): this span of the collapsed patch begins mid-way through
# _dict2h5group(); the helper is reconstructed here in full.
def _dict2h5group(mydict, group):
    """
    Recursively save dictionaries into groups in an HDF group.

    Args:
        mydict (dict):
            Dictionary of values to save in group or dataset.  Dictionary
            can contain objects of the following types: str, unicode, int,
            float, long, list, tuple, np.ndarray, dict,
            datetime.datetime, collections.OrderedDict
        group:
            HDF group or dataset in which to store dictionary of data.

    Returns:
        nothing
    """
    for (key, value) in mydict.items():
        tvalue = type(value)
        if tvalue not in ALLOWED:
            # permit direct subclasses of allowed types (e.g. HistoricTime
            # subclasses datetime) -- TODO confirm deeper hierarchies are
            # intentionally rejected, since only __bases__[0] is checked
            if tvalue.__bases__[0] not in ALLOWED:
                raise TypeError('Unsupported metadata value type "%s"' % tvalue)
        if isinstance(value, dict):
            subgroup = group.create_group(key)
            _dict2h5group(value, subgroup)
            continue
        elif isinstance(value, datetime):
            # convert datetime to a string, as there is no good
            # floating point format for datetimes before 1970.
            value = value.strftime(TIMEFMT)
        elif isinstance(value, list):
            value = _encode_list(value)
        elif isinstance(value, str):
            value = value.encode('utf8')
        else:
            pass
        group.attrs[key] = value


def _encode_list(value):
    """
    Recursively utf8-encode strings (and stringify datetimes) in a list so
    h5py can store it as an attribute.

    Args:
        value (list): List to encode.  Modified in place and also returned.

    Returns:
        (list) The encoded list.

    Raises:
        TypeError: if the list contains a dictionary.
    """
    for i, val in enumerate(value):
        if isinstance(val, list):
            value[i] = _encode_list(val)
        elif isinstance(val, datetime):
            # literal format matches the module-level TIMEFMT constant
            value[i] = val.strftime('%Y-%m-%d %H:%M:%S.%f').encode('utf8')
        elif isinstance(val, str):
            value[i] = val.encode('utf8')
        elif isinstance(val, dict):
            raise TypeError('Lists cannot contain dictionaries.')
        else:
            value[i] = val
    return value


def _decode_list(value):
    """
    Inverse of _encode_list: decode bytes back to str, parsing any value
    in TIMEFMT form back into a HistoricTime.

    Args:
        value (list): List (possibly nested) of stored values.

    Returns:
        (list) A new, decoded list.

    Raises:
        TypeError: if the list contains a dictionary.
    """
    outlist = []
    # (cleanup: original used enumerate() but never used the index)
    for val in value:
        if isinstance(val, list):
            outlist.append(_decode_list(val))
        elif isinstance(val, bytes):
            tval = val.decode('utf8')
            try:
                outlist.append(HistoricTime.strptime(tval, TIMEFMT))
            except ValueError:
                outlist.append(tval)
        elif isinstance(val, dict):
            raise TypeError('Lists cannot contain dictionaries.')
        else:
            outlist.append(val)
    return outlist


def _convert(data):
    """
    Recursively convert the bytes elements in a dictionary's values, lists,
    and tuples into str.

    Args:
        data: dict, tuple, list, np.ndarray, bytes, or scalar.

    Returns:
        A copy of the input with all bytes decoded to str (utf8); other
        values unchanged.
    """
    if isinstance(data, bytes):
        return data.decode('utf8')
    if isinstance(data, dict):
        return dict(map(_convert, data.items()))
    if isinstance(data, tuple):
        return tuple(map(_convert, data))
    if type(data) in (np.ndarray, list):
        return list(map(_convert, data))
    return data


def _get_type_list(hdfobj, pattern):
    """
    Return the list of groups or datasets from hdf object matching a pattern.

    Args:
        hdfobj: h5py File object (any mapping of mangled group names works).
        pattern (str): Kind of item -- "dictionary", "string", "array", etc.

    Returns:
        (list) Un-mangled group/dataset names, e.g. 'foo' for
        '__dictionary_foo__'.
    """
    names = []
    prefix = '__%s' % pattern
    for group_name in hdfobj.keys():
        if group_name.startswith(prefix):
            dname = group_name.replace('__%s_' % pattern, '').replace('__', '')
            names.append(dname)
    return names


def _drop_item(hdfobj, name, pattern):
    """
    Drop a group or dataset from the HDF object.

    Args:
        hdfobj: h5py File object.
        name: Un-mangled name of group or dataset to delete.
        pattern: Kind of item to delete ("dictionary", "string", "array",
            etc.)

    Raises:
        LookupError: if no item of that name/kind exists in the file.
    """
    group_name = '__%s_%s__' % (pattern, name)
    if group_name not in hdfobj:
        # BUG FIX: the original referenced `self.getFileName()`, but `self`
        # does not exist in this module-level function -- the miss path
        # raised NameError instead of the intended LookupError.  Use the
        # h5py File's filename attribute (same value getFileName returns).
        raise LookupError('%s %s not in %s' % (pattern, name, hdfobj.filename))
    del hdfobj[group_name]
+ """ + + group_name = '__%s_%s__' % (pattern,name) + if group_name not in hdfobj: + raise LookupError('%s %s not in %s' % (pattern,name,self.getFileName())) + del hdfobj[group_name] diff --git a/install.sh b/install.sh new file mode 100755 index 0000000..0c8faf6 --- /dev/null +++ b/install.sh @@ -0,0 +1,49 @@ +#!/bin/bash + +VENV=impact +PYVER=3.6 + +DEPARRAY=(ipython=6.1.0 jupyter=1.0.0 pandas=0.20.2 \ + numpy=1.12.1 matplotlib=1.5.3 cartopy=0.15.1 fiona=1.7.8 \ + shapely=1.5.17 pytest=3.1.2 pytest-cov=2.5.1 pycrypto=2.6.1 \ + paramiko=2.1.2 beautifulsoup4=4.5.3 h5py=2.7.0) + +# unamestr=`uname` +# if [[ "$unamestr" == 'Linux' ]]; then +# DEPARRAY=(numpy=1.11 scipy=0.19.1 matplotlib=2.0.2 rasterio=1.0a2 \ +# pandas=0.20.3 h5py=2.7.0 gdal=2.1.4 pytest=3.2.0 pytest-cov=2.5.1 \ +# cartopy=0.15.1 fiona=1.7.8 numexpr=2.6.2 configobj=5.0.6 decorator=4.1.2 \ +# versioneer==0.18) +# elif [[ "$unamestr" == 'FreeBSD' ]] || [[ "$unamestr" == 'Darwin' ]]; then +# DEPARRAY=(numpy=1.13.1 scipy=0.19.1 matplotlib=2.0.2 rasterio=1.0a9 \ +# pandas=0.20.3 h5py=2.7.0 gdal=2.1.4 pytest=3.2.0 pytest-cov=2.5.1 \ +# cartopy=0.15.1 fiona=1.7.8 numexpr=2.6.2 configobj=5.0.6 decorator=4.1.2 \ +# versioneer==0.18) +# fi + +#if we're already in an environment called pager, switch out of it so we can remove it +source activate root + +#remove any previous virtual environments called libcomcat +CWD=`pwd` +cd $HOME; +conda remove --name $VENV --all -y +cd $CWD + +#create a new virtual environment called $VENV with the below list of dependencies installed into it +conda create --name $VENV --yes --channel conda-forge python=$PYVER ${DEPARRAY[*]} -y + +#activate the new environment +source activate $VENV + +#install openquake from github +curl --max-time 300 --retry 3 -L https://github.com/gem/oq-engine/archive/master.zip -o openquake.zip +pip -v install --no-deps openquake.zip +rm openquake.zip + +# This package +echo "Installing impactutils..." +pip install -e . 
+ +#tell the user they have to activate this environment +echo "Type 'source activate ${VENV}' to use this new virtual environment." diff --git a/test/io/container_test.py b/test/io/container_test.py new file mode 100755 index 0000000..9bcee26 --- /dev/null +++ b/test/io/container_test.py @@ -0,0 +1,237 @@ +#!/usr/bin/env python + +from impactutils.io.container import HDFContainer +import numpy as np +import pandas as pd +from datetime import datetime +import tempfile +import os.path + +def test_hdf_dictonaries(): + f,testfile = tempfile.mkstemp() + os.close(f) + try: + container = HDFContainer.create(testfile) + + #test simple dictionary + print('Test simple dictionary...') + indict1 = {'name':'Fred','age':34,'dob':datetime(1950,1,1,23,43,12)} + container.setDictionary('person',indict1) + outdict = container.getDictionary('person') + assert outdict == indict1 + + #test more complicated dictionary + print('Test complex dictionary...') + indict2 = {'names':['Fred','Akyüz'],'ages':[34,33]} + container.setDictionary('people',indict2) + outdict = container.getDictionary('people') + assert outdict == indict2 + + #test getDictionaryNames() + print('Test dictionary names...') + names = container.getDictionaries() + assert sorted(names) == sorted(['person','people']) + + #test dropping a dictionary + container.dropDictionary('person') + assert container.getDictionaries() == ['people'] + + #try closing container and reopening + container.close() + container2 = HDFContainer.load(testfile) + assert container2.getDictionaries() == ['people'] + + except Exception: + assert 1==2 + finally: + os.remove(testfile) + +def test_hdf_lists(): + f,testfile = tempfile.mkstemp() + os.close(f) + try: + container = HDFContainer.create(testfile) + + #test setting a list of strings + inlist = ['one','two','three'] + container.setList('test_list1',inlist) + assert container.getList('test_list1') == inlist + + #test setting a list of numbers + inlist = [5.4,1.2,3.4] + 
container.setList('test_list2',inlist) + assert container.getList('test_list2') == inlist + + #test setting a list of datetimes + inlist = [datetime(1900,1,1),datetime.utcnow()] + container.setList('test_list3',inlist) + assert container.getList('test_list3') == inlist + + #test getlists + assert sorted(container.getLists()) == ['test_list1','test_list2','test_list3'] + + #test setting a heterogeneous list (should fail) + inlist = ['one',2,'three'] + try: + container.setList('test_list4',inlist) + except TypeError: + assert 1==1 + + #test setting a list with dictionaries in it (should fail) + inlist = [{'a':1},{'b':2}] + try: + container.setList('test_list5',inlist) + except TypeError: + assert 1==1 + + #drop a list + container.dropList('test_list1') + assert sorted(container.getLists()) == ['test_list2','test_list3'] + + #close container, re-open + container.close() + container2 = HDFContainer.load(testfile) + assert sorted(container2.getLists()) == ['test_list2','test_list3'] + + + except Exception: + assert 1==2 + finally: + os.remove(testfile) + +def test_hdf_arrays(): + f,testfile = tempfile.mkstemp() + os.close(f) + try: + container = HDFContainer.create(testfile) + + #test simple array + print('Test simple array...') + data = np.random.rand(4,3) + metadata = {'xmin':54.1,'xmax':123.1} + container.setArray('testdata1',data,metadata) + outdata,outmetadata = container.getArray('testdata1') + np.testing.assert_array_equal(outdata,data) + assert outmetadata == metadata + + #test array with nans + print('Test nans array...') + data = np.random.rand(4,3) + data[1,1] = np.nan + metadata = {'xmin':54.1,'xmax':123.1} + container.setArray('testdata2',data,metadata) + outdata,outmetadata = container.getArray('testdata2') + np.testing.assert_array_equal(outdata,data) + assert outmetadata == metadata + + #test getArrayNames + print('Test array names...') + names = container.getArrays() + assert sorted(names) == sorted(['testdata1','testdata2']) + + #drop an array + 
container.dropArray('testdata1') + names = container.getArrays() + assert names == ['testdata2'] + + #close container, re-open + container.close() + container2 = HDFContainer.load(testfile) + assert container2.getArrays() == ['testdata2'] + + + + except Exception: + assert 1==2 + finally: + os.remove(testfile) + +def test_hdf_strings(): + f,testfile = tempfile.mkstemp() + os.close(f) + try: + container = HDFContainer.create(testfile) + + #test simple string + print('Test simple string...') + string1 = "These are the times that try men's souls." + container.setString('test_string1',string1) + outstring = container.getString('test_string1') + assert outstring == string1 + + #test unicode string + print('Test unicode string...') + string2 = "#SOURCE: Barka, A., H. S. Akyüz, E. Altunel, G. Sunal, Z. Çakir," + container.setString('test_string2',string2) + outstring = container.getString('test_string2') + assert outstring == string2 + + #test getstrings + print('Test string names...') + names = container.getStrings() + assert names == ['test_string1','test_string2'] + + #drop string + container.dropString('test_string1') + assert container.getStrings() == ['test_string2'] + + #close container, re-open + container.close() + container2 = HDFContainer.load(testfile) + assert container2.getStrings() == ['test_string2'] + + except Exception: + assert 1==2 + finally: + os.remove(testfile) + + +def test_hdf_dataframes(): + f,testfile = tempfile.mkstemp() + os.close(f) + try: + container = HDFContainer.create(testfile) + + #test pandas dataframe + print('Test dataframe...') + d = {'Time':[datetime(1900,1,1),datetime(2000,1,1)], + 'ID':['thing1','thing2'], + 'Number':np.array([12.34,25.67])} + df = pd.DataFrame(d) + container.setDataFrame('testframe1',df) + outdf = container.getDataFrame('testframe1') + assert outdf['Number'].sum() == df['Number'].sum() + assert outdf['Time'][0] == df['Time'][0] + + #test another dataframe + df2 = pd.DataFrame(data=[4,5,6,7], index=range(0,4), 
columns=['A']) + container.setDataFrame('testframe2',df2) + outdf = container.getDataFrame('testframe2') + outdf['A'].sum() == df2['A'].sum() + + #test getdataframes + assert sorted(container.getDataFrames()) == ['testframe1','testframe2'] + + #drop a dataframe + container.dropDataFrame('testframe1') + assert container.getDataFrames() == ['testframe2'] + + #close container, re-open + container.close() + container2 = HDFContainer.load(testfile) + assert container2.getDataFrames() == ['testframe2'] + + except Exception: + assert 1==2 + finally: + os.remove(testfile) + + + +if __name__ == '__main__': + test_hdf_dictonaries() + test_hdf_lists() + test_hdf_arrays() + test_hdf_strings() + test_hdf_dataframes() + From 6a3427ec1e007c1bb90c67388cded426da85e55b Mon Sep 17 00:00:00 2001 From: Michael Hearne Date: Wed, 25 Oct 2017 10:19:46 -0600 Subject: [PATCH 3/3] Forgot to tell travis to run install.sh --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 8f681f9..b178653 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,7 +13,7 @@ install: - conda config --set always_yes yes --set changeps1 no - conda update -q conda - conda info -a - - bash setup_env.sh + - bash install.sh - source activate impact #--------------------- # Install impactutils