diff --git a/docs/GLM/Global/Geocode.md b/docs/GLM/Global/Geocode.md
new file mode 100644
index 000000000..ebcee6210
--- /dev/null
+++ b/docs/GLM/Global/Geocode.md
@@ -0,0 +1,51 @@
+[[/GLM/Global/Geocode]] -- Geohash encoder
+
+# Synopsis
+
+GLM:
+
+~~~
+${GEOCODE <latitude>,<longitude>[#<resolution>]}
+${GEOCODE <objectname>[#<resolution>]}
+~~~
+
+# Description
+
+Return the geohash code corresponding to the given latitude/longitude pair, or to the location of the named object. This can be helpful in connecting objects based on location, such as linking an object to weather.
+
+The default resolution is 5. The resolution corresponds approximately to the following distances:
+
+    1   2500 km
+    2   600 km
+    3   80 km
+    4   20 km
+    5   2.5 km
+    6   0.2 km
+    7   0.08 km
+    8   0.02 km
+    9   0.0025 km
+    10  0.0006 km
+    11  0.000075 km
+
+# Example
+
+The following example prints the geohash codes for a position and an object:
+
+~~~
+class test
+{
+    char32 geocode;
+}
+object test
+{
+    name "test";
+    latitude 37.5;
+    longitude -122.2;
+}
+#print ${GEOCODE 37.5,-122.2#6}
+#print ${GEOCODE test#6}
+~~~
+
+# See also
+
+* [[/GLM/Subcommand/Geodata]]
diff --git a/docs/GLM/Macro/Ifexist.md b/docs/GLM/Macro/Ifexist.md
index f6466477e..f02b32ed1 100644
--- a/docs/GLM/Macro/Ifexist.md
+++ b/docs/GLM/Macro/Ifexist.md
@@ -34,11 +34,8 @@ The `#ifexist` macro is used to conditionally process GLM lines when a file is f
 #endif
 ~~~
 
-# Caveats
-
-1. TODO enumerate known issues using Ifexist
-
 # See also
 
 * [[/Glm/Macro/If]]
+* [[/Glm/Macro/Ifmissing]]
+
diff --git a/docs/GLM/Macro/Ifmissing.md b/docs/GLM/Macro/Ifmissing.md
new file mode 100644
index 000000000..030a47da2
--- /dev/null
+++ b/docs/GLM/Macro/Ifmissing.md
@@ -0,0 +1,41 @@
+[[/Glm/Macro/Ifmissing]] -- Macro to conditionally process GLM lines when a file is missing
+
+# Synopsis
+
+GLM:
+
+~~~
+#ifmissing <filename>
+...
+[#else]
+...
+#endif
+~~~
+
+or
+
+~~~
+#ifmissing "<filename>"
+...
+[#else]
+...
+#endif
+~~~
+
+# Description
+
+The `#ifmissing` macro is used to conditionally process GLM lines when a file is not found. It is the logical complement of [[/Glm/Macro/Ifexist]].
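+
+A common use is to generate a required input file the first time a model is run, as in the following sketch (the data file and the command that creates it are hypothetical):
+
+~~~
+#ifmissing "data.csv"
+#system python3 make_data.py
+#endif
+~~~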
+ +# Examples + +~~~ +#ifmissing "myfile.glm" +#print didn't find it +#endif +~~~ + +# See also + +* [[/Glm/Macro/If]] +* [[/Glm/Macro/Ifexist]] + diff --git a/gldcore/autotest/test_geocode.glm b/gldcore/autotest/test_geocode.glm new file mode 100644 index 000000000..333253974 --- /dev/null +++ b/gldcore/autotest/test_geocode.glm @@ -0,0 +1,21 @@ +#set suppress_repeat_messages=false + +class test +{ + char32 geocode; +} + +object test +{ + name "test"; + latitude 37.5; + longitude -122.2; +} + +#if ${GEOCODE 37.5,-122.2#6} != 9q9j76 +#error geocode "37.5,-122.2#6" does not match "9q9j76" +#endif + +#if ${GEOCODE test#6} != 9q9j76 +#error geocode "test#6" does not match "9q9j76" +#endif diff --git a/gldcore/globals.cpp b/gldcore/globals.cpp index c993a9a58..676a720ba 100644 --- a/gldcore/globals.cpp +++ b/gldcore/globals.cpp @@ -1437,6 +1437,93 @@ DEPRECATED const char *global_findobj(char *buffer, int size, const char *spec) return buffer; } +const char *geocode_encode(char *buffer, int len, double lat, double lon, int resolution=12) +{ + static const char *base32 = "0123456789bcdefghjkmnpqrstuvwxyz"; + if ( len < resolution+1 ) + { + output_warning("geocode_encode(buffer=%p, len=%d, lat=%g, lon=%g, resolution=%d): buffer too small for specified resolution, result truncated", + buffer, len, lat, lon, resolution); + resolution = len-1; + } + double lat_interval[] = {-90,90}; + double lon_interval[] = {-180,180}; + char *geohash = buffer; + geohash[0] = '\0'; + int bits[] = {16,8,4,2,1}; + int bit = 0; + int ch = '\0'; + bool even = true; + int i = 0; + while ( i < resolution ) + { + if ( even ) + { + double mid = (lon_interval[0]+lon_interval[1])/2; + if ( lon > mid ) + { + ch |= bits[bit]; + lon_interval[0] = mid; + } + else + { + lon_interval[1] = mid; + } + } + else + { + double mid = (lat_interval[0]+lat_interval[1])/2; + if ( lat > mid ) + { + ch |= bits[bit]; + lat_interval[0] = mid; + } + else + { + lat_interval[1] = mid; + } + } + even = !even; + if ( bit < 4 ) + { + bit += 1; + } + else + { + *geohash++ = base32[ch]; + i++; + bit = 0; + ch = 0; + } + } + *geohash++ = '\0'; + return buffer; +} + +DEPRECATED const char *global_geocode(char *buffer, int size, const char *spec) +{ + double lat, lon; + OBJECT *obj; + unsigned int res = 5; // about 2.4 km resolution by default + char name[64]; + if ( sscanf(spec,"%lg,%lg#%u",&lat,&lon,&res) >= 2 ) + { + return geocode_encode(buffer,size,lat,lon,res); + } + else if ( sscanf(spec,"%63[^#]#%u",name,&res) >= 1 && (obj=object_find_name(name)) != NULL ) + { + lat = obj->latitude; + lon = obj->longitude; + if ( isfinite(lat) && isfinite(lon) && lat>=-90 && lat<=+90 && lon>=-180 && lon<=180 ) + { + return geocode_encode(buffer,size,lat,lon,res); + } + } + output_warning("${GEOCODE %s}: geocode spec is not valid",spec); + buffer[0] = '\0'; + return buffer; +} + /** Get the value of a global variable in a safer fashion @return a \e char * pointer to the buffer holding the buffer where we wrote the data, \p NULL if insufficient buffer space or if the \p name was not found. 
@@ -1531,6 +1618,10 @@ const char *GldGlobals::getvar(const char *name, char *buffer, size_t size) if ( strncmp(name,"FIND ",5) == 0 ) { return global_findobj(buffer,size,name+5); + } + if ( strncmp(name,"GEOCODE ",8) == 0 ) + { + return global_geocode(buffer,size,name+8); } /* expansions */ if ( parameter_expansion(buffer,size,name) ) diff --git a/gldcore/gridlabd.in b/gldcore/gridlabd.in index d6fa12331..c38c769f5 100644 --- a/gldcore/gridlabd.in +++ b/gldcore/gridlabd.in @@ -1,21 +1,19 @@ #! /bin/sh -# Generated from gridlabd.m4sh by GNU Autoconf 2.71. +# Generated from gldcore/gridlabd.m4sh by GNU Autoconf 2.69. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh -as_nop=: -if test ${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 -then : +if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST -else $as_nop +else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( @@ -25,46 +23,46 @@ esac fi - -# Reset variables that may have inherited troublesome values from -# the environment. - -# IFS needs to be set, to space, tab, and newline, in precisely that order. -# (If _AS_PATH_WALK were called with IFS unset, it would have the -# side effect of setting IFS to empty, thus disabling word splitting.) -# Quoting is to prevent editors from complaining about space-tab. as_nl=' ' export as_nl -IFS=" "" $as_nl" - -PS1='$ ' -PS2='> ' -PS4='+ ' - -# Ensure predictable behavior from utilities with locale-dependent output. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# We cannot yet rely on "unset" to work, but we need these variables -# to be unset--not just set to an empty or harmless value--now, to -# avoid bugs in old shells (e.g. pre-3.0 UWIN ksh). This construct -# also avoids known problems related to "unset" and subshell syntax -# in other old shells (e.g. bash 2.01 and pdksh 5.2.14). -for as_var in BASH_ENV ENV MAIL MAILPATH CDPATH -do eval test \${$as_var+y} \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done - -# Ensure that fds 0, 1, and 2 are open. -if (exec 3>&0) 2>/dev/null; then :; else exec 0&1) 2>/dev/null; then :; else exec 1>/dev/null; fi -if (exec 3>&2) ; then :; else exec 2>/dev/null; fi +# Printing a long string crashes Solaris 7 /usr/bin/printf. +as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo +as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo +# Prefer a ksh shell builtin over an external printf program on Solaris, +# but without wasting forks for bash or zsh. 
+if test -z "$BASH_VERSION$ZSH_VERSION" \ + && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='print -r --' + as_echo_n='print -rn --' +elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then + as_echo='printf %s\n' + as_echo_n='printf %s' +else + if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then + as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' + as_echo_n='/usr/ucb/echo -n' + else + as_echo_body='eval expr "X$1" : "X\\(.*\\)"' + as_echo_n_body='eval + arg=$1; + case $arg in #( + *"$as_nl"*) + expr "X$arg" : "X\\(.*\\)$as_nl"; + arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; + esac; + expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" + ' + export as_echo_n_body + as_echo_n='sh -c $as_echo_n_body as_echo' + fi + export as_echo_body + as_echo='sh -c $as_echo_body as_echo' +fi # The user is always right. -if ${PATH_SEPARATOR+false} :; then +if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || @@ -73,6 +71,13 @@ if ${PATH_SEPARATOR+false} :; then fi +# IFS +# We need space, tab and new line, in precisely that order. Quoting is +# there to prevent editors from complaining about space-tab. +# (If _AS_PATH_WALK were called with IFS unset, it would disable word +# splitting by setting IFS to empty value.) +IFS=" "" $as_nl" + # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( @@ -81,12 +86,8 @@ case $0 in #(( for as_dir in $PATH do IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac - test -r "$as_dir$0" && as_myself=$as_dir$0 && break + test -z "$as_dir" && as_dir=. + test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS @@ -98,22 +99,40 @@ if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then - printf "%s\n" "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 + $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi +# Unset variables that we do not need and which cause bugs (e.g. in +# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" +# suppresses any "Segmentation fault" message there. '((' could +# trigger a bug in pdksh 5.2.14. +for as_var in BASH_ENV ENV MAIL MAILPATH +do eval test x\${$as_var+set} = xset \ + && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : +done +PS1='$ ' +PS2='> ' +PS4='+ ' + +# NLS nuisances. +LC_ALL=C +export LC_ALL +LANGUAGE=C +export LANGUAGE + +# CDPATH. +(unset CDPATH) >/dev/null 2>&1 && unset CDPATH if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="as_nop=: -if test \${ZSH_VERSION+y} && (emulate sh) >/dev/null 2>&1 -then : + as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. 
alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST -else \$as_nop +else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( @@ -133,46 +152,36 @@ as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ) -then : +if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : -else \$as_nop +else exitcode=1; echo positional parameters were not saved. fi test x\$exitcode = x0 || exit 1 -blah=\$(echo \$(echo blah)) -test x\"\$blah\" = xblah || exit 1 test -x / || exit 1" as_suggested="" - if (eval "$as_required") 2>/dev/null -then : + if (eval "$as_required") 2>/dev/null; then : as_have_required=yes -else $as_nop +else as_have_required=no fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null -then : + if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : -else $as_nop +else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS - case $as_dir in #((( - '') as_dir=./ ;; - */) ;; - *) as_dir=$as_dir/ ;; - esac + test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. - as_shell=$as_dir$as_base + as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - as_run=a "$as_shell" -c "$as_bourne_compatible""$as_required" 2>/dev/null -then : + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes break 2 fi @@ -180,21 +189,14 @@ fi esac as_found=false done -IFS=$as_save_IFS -if $as_found -then : - -else $as_nop - if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - as_run=a "$SHELL" -c "$as_bourne_compatible""$as_required" 2>/dev/null -then : +$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && + { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes -fi -fi +fi; } +IFS=$as_save_IFS - if test "x$CONFIG_SHELL" != x -then : + if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also @@ -212,19 +214,18 @@ esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. -printf "%s\n" "$0: could not re-execute with $CONFIG_SHELL" >&2 +$as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi - if test x$as_have_required = xno -then : - printf "%s\n" "$0: This script requires a shell more modern than all" - printf "%s\n" "$0: the shells that I found on your system." - if test ${ZSH_VERSION+y} ; then - printf "%s\n" "$0: In particular, zsh $ZSH_VERSION has bugs and should" - printf "%s\n" "$0: be upgraded to zsh 4.3.4 or later." + if test x$as_have_required = xno; then : + $as_echo "$0: This script requires a shell more modern than all" + $as_echo "$0: the shells that I found on your system." + if test x${ZSH_VERSION+set} = xset ; then + $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" + $as_echo "$0: be upgraded to zsh 4.3.4 or later." 
else - printf "%s\n" "$0: Please tell bug-autoconf@gnu.org about your system, + $as_echo "$0: Please tell bug-autoconf@gnu.org about your system, $0: including any error possibly output before this $0: message. Then install a modern shell, or manually run $0: the script under such a shell if you do have one." @@ -250,16 +251,6 @@ as_fn_unset () { eval $1=; unset $1;} } as_unset=as_fn_unset - -# as_fn_nop -# --------- -# Do nothing but, unlike ":", preserve the value of $?. -as_fn_nop () -{ - return $? -} -as_nop=as_fn_nop - ## -------------------- ## ## Main body of script. ## ## -------------------- ## @@ -339,10 +330,9 @@ export LIB="-L$libdir -L/usr/local/lib -L/usr/lib" export LDFLAGS="${LIB} ${PYLDFLAGS} ${LDFLAGS}" export PYTHONPATH=.:${GLD_ETC}${PYTHONPATH:+:}${PYTHONPATH} -if test "x$GLPATH" = x -then : +if test "x$GLPATH" = x; then : export GLPATH="$pkglibdir:$pkgdatadir" -else $as_nop +else export GLPATH="$pkglibdir:$pkgdatadir:$GLPATH" fi @@ -365,14 +355,16 @@ elif test "x$1" = "xvalgrind" ; then : exit 0 fi -if test -f "${pkgdatadir}/gridlabd.rc" -then : +if test -f "${pkgdatadir}/gridlabd.rc"; then : . ${pkgdatadir}/gridlabd.rc fi -if test -x "${bindir}/gridlabd-$1" -then : +if test -f "${GLD_ETC}/$1.py"; then : + export PYTHONPATH=$GLD_ETC; /usr/local/bin/python3 -m "$@" ; exit $? +fi + +if test -x "${bindir}/gridlabd-$1"; then : "${bindir}/gridlabd"-"$@" -else $as_nop +else "${bindir}/gridlabd.bin" "$@" && "${bindir}/gridlabd-version" check -w fi diff --git a/gldcore/gridlabd.m4sh b/gldcore/gridlabd.m4sh index a3ecff3bb..178904c93 100644 --- a/gldcore/gridlabd.m4sh +++ b/gldcore/gridlabd.m4sh @@ -102,6 +102,10 @@ AS_IF([test -f "${pkgdatadir}/gridlabd.rc"], [. ${pkgdatadir}/gridlabd.rc], []) +AS_IF([test -f "${GLD_ETC}/$1.py"], + [export PYTHONPATH=$GLD_ETC; /usr/local/bin/python3 -m "$@" ; exit $?], + []) + AS_IF([test -x "${bindir}/gridlabd-$1"], ["${bindir}/gridlabd"-"$@"], ["${bindir}/gridlabd.bin" "$@" && "${bindir}/gridlabd-version" check -w]) diff --git a/gldcore/load.cpp b/gldcore/load.cpp index 9842541fa..60cc181aa 100755 --- a/gldcore/load.cpp +++ b/gldcore/load.cpp @@ -7373,6 +7373,33 @@ int GldLoader::process_macro(char *line, int size, char *_filename, int linenum) strcpy(line,"\n"); return TRUE; } + else if (strncmp(line,"#ifmissing",8)==0) + { + char *term = strchr(line+8,' '); + char value[1024]; + char path[1024]; + if (term==NULL) + { + syntax_error(filename,linenum,"#ifmissing macro missing term"); + return FALSE; + } + while(isspace((unsigned char)(*term))) + ++term; + //if (sscanf(term,"\"%[^\"\n]",value)==1 && find_file(value, NULL, 0)==NULL) + strcpy(value, strip_right_white(term)); + if(value[0] == '"'){ + char stripbuf[1024]; + sscanf(value, "\"%[^\"\n]", stripbuf); + strcpy(value, stripbuf); + } + if (find_file(value, NULL, F_OK, path,sizeof(path))!=NULL) + suppress |= (1<loop==my->loopnum) { - my->next.ts = (unsigned short)S; + my->next.ts = (TIMESTAMP)S; my->next.ns = (unsigned int)(1e9*(S-my->next.ts)); if ((obj->flags & OF_DELTAMODE)==OF_DELTAMODE) /* Only request deltamode if we're explicitly enabled */ enable_deltamode(my->next.ns==0?TS_NEVER:t1); diff --git a/python_extras/.gitignore b/python_extras/.gitignore new file mode 100644 index 000000000..3ca781a19 --- /dev/null +++ b/python_extras/.gitignore @@ -0,0 +1,3 @@ +test.csv +test.glm +test_out.csv diff --git a/python_extras/Makefile.mk b/python_extras/Makefile.mk index a1974630e..878f4ff1e 100644 --- a/python_extras/Makefile.mk +++ b/python_extras/Makefile.mk @@ -1,11 +1,12 @@ 
dist_pkgdata_DATA += python_extras/csv_merge/csv_merge.py
+dist_pkgdata_DATA += python_extras/eia_recs.py
 dist_pkgdata_DATA += python_extras/example/house.glm
 dist_pkgdata_DATA += python_extras/example/tstat_commit.py
 dist_pkgdata_DATA += python_extras/example/tstat_init.py
-dist_pkgdata_DATA += python_extras/volt_dump/meter_record.py
-dist_pkgdata_DATA += python_extras/volt_dump/voltdump.py
-dist_pkgdata_DATA += python_extras/metar2glm.py
-dist_pkgdata_DATA += python_extras/gridlabd-editor.py
 dist_pkgdata_DATA += python_extras/gridlabd-editor.png
+dist_pkgdata_DATA += python_extras/gridlabd-editor.py
+dist_pkgdata_DATA += python_extras/metar2glm.py
+dist_pkgdata_DATA += python_extras/nsrdb_weather.py
 dist_pkgdata_DATA += python_extras/ucar_weather.py
-dist_pkgdata_DATA += python_extras/eia_recs.py
+dist_pkgdata_DATA += python_extras/volt_dump/meter_record.py
+dist_pkgdata_DATA += python_extras/volt_dump/voltdump.py
diff --git a/python_extras/example/nsrdb_weather.glm b/python_extras/example/nsrdb_weather.glm
new file mode 100644
index 000000000..ec715d939
--- /dev/null
+++ b/python_extras/example/nsrdb_weather.glm
@@ -0,0 +1,6 @@
+// This example illustrates how to download data from the NREL NSRDB database and generate a GLM file to load it
+
+#ifmissing "/tmp/test.csv"
+#system gridlabd nsrdb_weather -y=2018-2020 -p=37.4,-122.2 -i=5 -g=/tmp/test.glm -c=/tmp/test.csv -n=test
+#endif
+#include "/tmp/test.glm"
diff --git a/python_extras/nsrdb_weather.py b/python_extras/nsrdb_weather.py
new file mode 100644
index 000000000..c32cb3cd0
--- /dev/null
+++ b/python_extras/nsrdb_weather.py
@@ -0,0 +1,515 @@
+"""NSRDB weather data tool
+
+SYNOPSIS
+
+Shell:
+    bash$ gridlabd nsrdb_weather -y|--year=YEARS -p|--position=LAT,LON
+        [-i|--interpolate=MINUTES|METHOD] [-e|--encode=LAT,LON]
+        [-g|--glm=GLMNAME] [-n|--name=OBJECTNAME] [-c|--csv=CSVNAME]
+        [--whoami] [--signup=EMAIL] [--apikey[=APIKEY]]
+        [--test] [-v|--verbose] [-h|--help|help]
+
+GLM:
+    #system gridlabd nsrdb_weather -y|--year=YEARS -p|--position=LAT,LON
+        [-i|--interpolate=MINUTES|METHOD] [-e|--encode=LAT,LON]
+        [-g|--glm=GLMNAME] [-n|--name=OBJECTNAME] [-c|--csv=CSVNAME]
+        [--whoami] [--signup=EMAIL] [--apikey[=APIKEY]]
+        [--test] [-v|--verbose] [-h|--help|help]
+    #include "GLMNAME"
+
+Python:
+    bash$ gridlabd python
+    >>> import nsrdb_weather as ns
+    >>> data = ns.getyears(YEARS,LAT,LON)
+    >>> ns.writeglm(data,GLMNAME,OBJECTNAME,CSVNAME)
+
+DESCRIPTION
+
+This module downloads weather data from NSRDB and writes GLM files. This can
+be done from the command line or by calling the Python API.
+
+Weather data is downloaded for a specified location and year, both of which
+must be provided. The data is downloaded in either 30 or 60 minute intervals
+and cached for later use. Data delivered from the cache can be further
+interpolated, down to 1 minute resolution.
+
+By default the weather data is output to /dev/stdout. If the CSV file name
+is specified using `-c|--csv=CSVNAME`, the data will be written to that file.
+
+If the GLM file name is specified, the CSV file will be formatted for
+compatibility with GridLAB-D players and the GLM file will contain a
+definition of the weather class, a weather object, and a player object to
+feed the weather data in from the CSV. If the weather object name is not
+provided, then the name is generated automatically from the location's
+geohash code at the default `geocode_precision` of 6 (roughly 200 m
+resolution), e.g., "weather@9q9j76". To change the geohash resolution,
+change the `geocode_precision` parameter. To determine the geohash for a
+location use the `-e|--encode` option.
+
+The GLM file can be output to "/dev/stdout" for embedding in other GLM files.
+For example:
+
+    #include (gridlabd nsrdb_weather -y=2010 -p=37.5,-122.2 -g=/dev/stdout)
+
+This is equivalent to
+
+    #system gridlabd nsrdb_weather -y=2010 -p=37.5,-122.2 -g=/tmp/example.glm
+    #include "/tmp/example.glm"
+
+without the creation of the temporary file.
+
+The global `${WEATHER}` is set to a space-delimited list of the weather
+objects defined in the GLM file.
+
+PARAMETERS
+
+The module uses several parameters to control its behavior.
+
+    leap = True # include leap day in data
+    interval = 60 # sample interval in minutes, may be 30 or 60
+    utc = False # timestamps in UTC
+    email = None # credential email (defaults to the first key in the credentials file)
+    verbose_enable = False # verbose output enable
+    server = "https://developer.nrel.gov/api/solar/nsrdb_psm3_download.csv" # NSRDB server URL
+    cachedir = "/usr/local/share/gridlabd/weather" # local NSRDB cache folder
+    attributes = 'ghi,dhi,dni,cloud_type,dew_point,air_temperature,surface_albedo,wind_speed,wind_direction,solar_zenith_angle,relative_humidity,surface_pressure' # NSRDB fields to download
+    credential_file = f"{os.getenv('HOME')}/.nsrdb/credentials.json" # local credential file location
+    geocode_precision = 6 # geohash precision (used for automatic naming of weather objects)
+    float_format = "%.1f" # CSV output floating point format
+
+The geocode precisions correspond roughly to the following distances:
+
+    1   2,500 km
+    2   600 km
+    3   80 km
+    4   20 km
+    5   2.5 km
+    6   200 m
+    7   80 m
+    8   20 m
+    9   2.5 m
+    10  60 cm
+    11  7.5 cm
+
+You can change these options in Python scripts, e.g.,
+
+    >>> import nsrdb_weather as ns
+    >>> ns.interval = 30
+    >>> data = ns.getyear(2014,45.62,-122.70)
+
+You can permanently change these options by creating a local or shared file
+called `nsrdb_weather_config.py`. If found, this file is imported after the
+defaults are set. Note that the default year, position, glm, csv, and name
+cannot be changed this way.
+
+CREDENTIALS
+
+You must obtain an API key from https://developer.nrel.gov/signup/. Save the
+key in the credentials file, which is by default
+`$HOME/.nsrdb/credentials.json`.
+
+You can run this process in a semi-automated manner using the command
+
+    bash$ gridlabd nsrdb_weather --signup=EMAIL
+
+which opens the signup page and creates a placeholder credential entry; you
+can then record the new key using the `--apikey=APIKEY` option.
+
+CAVEATS
+
+Normally the column units are included in the column names when the CSV file
+is written. However, when a GLM file is written, the units are omitted from
+the CSV column names; they are instead given in the `weather` class
+definition generated by the GLM writer.
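+
+OUTPUT
+
+For reference, the GLM written by `writeglm()` has roughly the following
+shape (abbreviated here to two of the generated channels; the actual file
+lists every column delivered by NSRDB):
+
+    class weather
+    {
+        double temperature[degF];
+        double wind_speed[m/s];
+    }
+    module tape;
+    #ifdef WEATHER
+    #set WEATHER=$WEATHER weather@9q9j76
+    #else
+    #define WEATHER=weather@9q9j76
+    #endif
+    object weather
+    {
+        name "weather@9q9j76";
+        latitude 37.5;
+        longitude -122.2;
+        object player
+        {
+            file "weather@9q9j76.csv";
+            property "temperature,wind_speed";
+        };
+    }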
+
+EXAMPLE
+
+The following command downloads only the CSV data for a location:
+
+    bash$ gridlabd nsrdb_weather -y=2014,2015 -p=45.62,-122.70 -c=test.csv
+
+The following command downloads the CSV data and creates a GLM file with the
+data linked and the weather object named:
+
+    bash$ gridlabd nsrdb_weather -y=2014,2015 -p=45.62,-122.70 -c=test.csv -n=test -g=test.glm
+
+SEE ALSO
+
+* [https://nsrdb.nrel.gov/data-sets/api-instructions.html]
+"""
+
+import sys, os, json, requests, pandas, numpy, datetime
+from math import sqrt, fabs # needed by heat_index()
+
+leap = True
+interval = 60
+utc = False
+email = None # by default this will be the first key in the credentials file
+interpolate_time = None
+interpolate_method = 'linear'
+server = "https://developer.nrel.gov/api/solar/nsrdb_psm3_download.csv"
+cachedir = "/usr/local/share/gridlabd/weather"
+attributes = 'ghi,dhi,dni,cloud_type,dew_point,air_temperature,surface_albedo,wind_speed,wind_direction,solar_zenith_angle,relative_humidity,surface_pressure'
+credential_file = f"{os.getenv('HOME')}/.nsrdb/credentials.json"
+geocode_precision = 6
+float_format="%.1f"
+date_format="%Y-%m-%d %H:%M:%S"
+verbose_enable = False
+
+try:
+    from nsrdb_weather_config import *
+except:
+    pass
+
+def error(msg,code=None):
+    """Display an error message and exit if code is a number"""
+    if code != None:
+        print(f"ERROR [nsrdb_weather.py]: {msg}",file=sys.stderr)
+        exit(code)
+    else:
+        raise Exception(msg)
+
+def syntax(code=0):
+    """Display docs (code=0) or syntax help and exit (code!=0)"""
+    if code == 0:
+        print(__doc__)
+    else:
+        print(f"Syntax: {os.path.basename(sys.argv[0])} -y|--year=YEARS -p|--position=LAT,LON")
+        print("\t[-i|--interpolate=MINUTES|METHOD]\n\t[-g|--glm[=GLMNAME]] [-n|--name=OBJECTNAME] [-c|--csv=CSVNAME]\n\t[--whoami] [--signup=EMAIL] [--apikey[=APIKEY]]\n\t[--test] [-v|--verbose] [-h|--help|help]")
+    exit(code)
+
+def verbose(msg):
+    """Display a verbose message (requires verbose_enable to be True)"""
+    if verbose_enable:
+        print(f"[{os.path.basename(sys.argv[0])}]: {msg}",file=sys.stderr)
+
+def getemail():
+    """Get the default email"""
+    global email
+    if not email:
+        keys = getkeys().keys()
+        if keys:
+            email = list(getkeys().keys())[0]
+        else:
+            email = None
+    return email
+
+def addkey(apikey=None):
+    """Manage NSRDB API keys"""
+    global email
+    global credential_file
+    if not email:
+        email = getemail()
+    keys = getkeys()
+    if email:
+        if apikey or not email in keys.keys():
+            keys[email] = apikey
+        elif not apikey and email in keys.keys():
+            del keys[email]
+        with open(credential_file,"w") as f:
+            json.dump(keys,f)
+
+def getkeys():
+    """Get all NSRDB API keys"""
+    global credential_file
+    try:
+        with open(credential_file,"r") as f:
+            keys = json.load(f)
+    except:
+        keys = {}
+    return keys
+
+def getkey(email=None):
+    """Get a single NSRDB API key"""
+    if not email:
+        email = getemail()
+    if email:
+        return getkeys()[email]
+    else:
+        return None
+
+def getyears(years,lat,lon,concat=True):
+    """Get NSRDB weather data for multiple years"""
+    try:
+        result = {}
+        for year in years:
+            data = getyear(year,lat,lon)
+            if result:
+                for key,value in result.items():
+                    result[key].extend(data[key])
+            else:
+                result = data
+        if concat:
+            result["DataFrame"] = pandas.concat(result["DataFrame"])
+        if interpolate_time:
+            final = []
+            if concat:
+                dflist = [result["DataFrame"]]
+            else:
+                dflist = result["DataFrame"]
+            for data in dflist:
+                verbose(f"getyears(years={years},lat={lat},lon={lon}): interpolating {interval} minute data to {interpolate_time} minutes using {interpolate_method} method")
+                starttime = data.index.min()
+                stoptime = data.index.max()
+                daterange = pandas.DataFrame(index=pandas.date_range(starttime,stoptime,freq=f"{interpolate_time}min"))
+                final.append(data.join(daterange,how="outer",sort=True).interpolate(interpolate_method))
+            if concat:
+                result["DataFrame"] = pandas.concat(final)
+            else:
+                result["DataFrame"] = final
+        return result
+    except Exception as err:
+        if verbose_enable:
+            raise
+        else:
+            error(f"unable to get data ({err})",2)
+
+def heat_index(T,RH):
+    """Compute the heat index for a temperature T (in degF) and relative humidity RH (in %)"""
+    if T < 80 :
+        return 0.75*T + 0.25*( 61.0+1.2*(T-68.0)+0.094*RH)
+    else:
+        HI = -42.379 \
+            + 2.04901523*T \
+            + 10.14333127*RH \
+            - 0.22475541*T*RH \
+            - 0.00683783*T*T \
+            - 0.05481717*RH*RH \
+            + 0.00122874*T*T*RH \
+            + 0.00085282*T*RH*RH \
+            - 0.00000199*T*T*RH*RH
+        if RH < 13 and T < 112:
+            return HI - ((13-RH)/4)*sqrt((17-fabs(T-95.))/17)
+        elif RH > 85 and T < 87:
+            return HI + ((RH-85)/10) * ((87-T)/5)
+        else:
+            return HI
+
+def getyear(year,lat,lon):
+    """Get NSRDB weather data for a single year"""
+    api = getkey()
+    url = f"{server}?wkt=POINT({lon}%20{lat})&names={year}&leap_day={str(leap).lower()}&interval={interval}&utc={str(utc).lower()}&api_key={api}&attributes={attributes}&email={email}&full_name=None&affiliation=None&mailing_list=false&reason=None"
+    cache = f"{cachedir}/nsrdb/{year}/{geohash(lat,lon)}.csv"
+    try:
+        result = pandas.read_csv(cache,nrows=1).to_dict(orient="list")
+        result.update(dict(Year=[year],DataFrame=[pandas.read_csv(cache,skiprows=2)]))
+        verbose(f"getyear(year={year},lat={lat},lon={lon}): reading data from {cache}")
+    except:
+        result = None
+    if not result:
+        os.makedirs(os.path.dirname(cache),exist_ok=True)
+        with open(cache,"w") as fout:
+            verbose(f"getyear(year={year},lat={lat},lon={lon}): downloading data from {url}")
+            fout.write(requests.get(url).content.decode("utf-8"))
+            verbose(f"getyear(year={year},lat={lat},lon={lon}): saved data to {cache}")
+        result = pandas.read_csv(cache,nrows=1).to_dict(orient="list")
+        result.update(dict(Year=[year],DataFrame=[pandas.read_csv(cache,skiprows=2)]))
+    for data in result["DataFrame"]:
+        data["datetime"] = list(map(lambda x: datetime.datetime(x[0,0],x[0,1],x[0,2],x[0,3],0,0),numpy.matrix([data.Year,data.Month,data.Day,data.Hour]).transpose()))
+        data.set_index("datetime",inplace=True)
+        data.drop(columns=["Year","Day","Month","Hour","Minute"],inplace=True)
+        data.columns = [
+            "solar_global[W/sf]",
+            "solar_horizontal[W/sf]",
+            "solar_direct[W/sf]",
+            "clouds",
+            "dewpoint[degF]",
+            "temperature[degF]",
+            "ground_reflectivity[pu]",
+            "wind_speed[m/s]",
+            "wind_dir[rad]",
+            "solar_altitude[deg]",
+            "humidity[%]",
+            "pressure[mbar]",
+        ]
+        # unit conversions: W/m^2 -> W/sf, degC -> degF, deg -> rad
+        data["solar_global[W/sf]"] /= 10.7639
+        data["solar_horizontal[W/sf]"] /= 10.7639
+        data["solar_direct[W/sf]"] /= 10.7639
+        data["dewpoint[degF]"] = data["dewpoint[degF]"]*9/5+32
+        data["temperature[degF]"] = data["temperature[degF]"]*9/5+32
+        data["wind_dir[rad]"] *= 3.14159265358979/180
+        data["heat_index[degF]"] = list(map(lambda x:heat_index(x[0],x[1]),zip(data["temperature[degF]"],data["humidity[%]"])))
+        data.index.name = "datetime"
+    return result
+
+def geohash(latitude, longitude, precision=geocode_precision):
+    """Encode a position given in float arguments latitude, longitude to
+    a geohash which will have the character count precision.
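+
+    Example (the expected value is checked by gldcore/autotest/test_geocode.glm):
+
+        >>> geohash(37.5,-122.2,6)
+        '9q9j76'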
+ """ + from math import log10 + __base32 = '0123456789bcdefghjkmnpqrstuvwxyz' + __decodemap = { } + for i in range(len(__base32)): + __decodemap[__base32[i]] = i + del i + lat_interval, lon_interval = (-90.0, 90.0), (-180.0, 180.0) + geohash = [] + bits = [ 16, 8, 4, 2, 1 ] + bit = 0 + ch = 0 + even = True + while len(geohash) < precision: + if even: + mid = (lon_interval[0] + lon_interval[1]) / 2 + if longitude > mid: + ch |= bits[bit] + lon_interval = (mid, lon_interval[1]) + else: + lon_interval = (lon_interval[0], mid) + else: + mid = (lat_interval[0] + lat_interval[1]) / 2 + if latitude > mid: + ch |= bits[bit] + lat_interval = (mid, lat_interval[1]) + else: + lat_interval = (lat_interval[0], mid) + even = not even + if bit < 4: + bit += 1 + else: + geohash += __base32[ch] + bit = 0 + ch = 0 + return ''.join(geohash) + + +def writeglm(data, glm=None, name=None, csv=None): + """Write weather object based on NSRDB data + + Default GLM and CSV values are handled as follows + GLM CSV Output + ------ ------ ------ + None None CSV->stdout + GLM None GLM, CSV->GLM/.glm/.csv + None CSV GLM->stdout, CSV + GLM CSV GLM, CSV + + The default name is "weather@GEOCODE" + + The WEATHER global is set to the list of weather object names. + """ + lat = data['Latitude'][0] + lon = data['Longitude'][0] + if not name: + name = f"weather@{geohash(lat,lon,geocode_precision)}" + if type(data["DataFrame"]) is list: + weather = pandas.concat(data["DataFrame"]) + else: + weather = data["DataFrame"] + if not csv and not glm: + weather.to_csv("/dev/stdout",header=True,float_format=float_format,date_format=date_format) + return dict(glm=None,csv="/dev/stdout",name=None) + if not glm: + glm = "/dev/stdout" + if not csv: + csv = f"{name}.csv" + with open(glm,"w") as f: + f.write("class weather\n{\n") + for column in weather.columns: + f.write(f"\tdouble {column};\n") + f.write("}\n") + weather.columns = list(map(lambda x:x.split('[')[0],weather.columns)) + f.write("module tape;\n") + f.write("#ifdef WEATHER\n") + f.write(f"#set WEATHER=$WEATHER {name}\n") + f.write("#else\n") + f.write(f"#define WEATHER={name}\n") + f.write("#endif\n") + f.write("object weather\n{\n") + f.write(f"\tname \"{name}\";\n") + f.write(f"\tlatitude {lat};\n") + f.write(f"\tlongitude {lon};\n") + f.write("\tobject player\n\t{\n") + f.write(f"\t\tfile \"{csv}\";\n") + f.write(f"\t\tproperty \"{','.join(weather.columns)}\";\n") + f.write("\t};\n") + f.write("}\n") + weather.to_csv(csv,header=False,float_format=float_format,date_format="%s") + return dict(glm=glm,csv=csv,name=name) + +if __name__ == "__main__": + year = None + position = None + glm = None + name = None + csv = None + if len(sys.argv) == 1: + syntax(1) + for arg in sys.argv[1:]: + args = arg.split("=") + if type(args) is list and len(args) > 1: + token = args[0] + value = args[1] + elif type(args) is list: + token = args[0] + value = None + else: + token = args + value = None + if token in ["-h","--help","help"]: + syntax() + elif token in ["-y","--year"]: + year = [] + for y in value.split(","): + yy = y.split("-") + if len(yy) == 1: + year.append(int(yy[0])) + elif len(yy) == 2: + year.extend(range(int(yy[0]),int(yy[1])+1)) + else: + raise Exception("'{value}' is not a valid invalid year specification") + elif token in ["-p","--position"]: + position = value.split(",") + if len(position) != 2: + error("position is not a tuple",1) + elif token in ["-i","--interpolate"]: + try: + interpolate_time = int(value) + except: + if value: + interpolate_method = value + else: + 
interpolate_time = None + elif token in ["-g","--glm"]: + glm = value + elif token in ["-n","--name"]: + name = value + elif token in ["-c","--csv"]: + csv = value + elif token == "--test": + year = [2014,2015] + position = [45.62,-122.70] + glm = "test.glm" + writeglm(getyears(year,float(position[0]),float(position[1])),glm,name,csv) + exit(os.system(f"gridlabd {glm}")) + elif token == "--signup": + if not value: + error("you must provide an email address for the new credential",1) + credentials = getkeys() + if getemail() in credentials.keys(): + error(f"you already have credentials for {value}",1) + else: + email = value + addkey("PASTE_YOUR_APIKEY_HERE") + import webbrowser + webbrowser.open("https://developer.nrel.gov/signup/") + print(f"use `gridlabd nsrdb_weather --apikey=` to set your api key") + elif token == "--apikey": + if not getemail(): + error(f"you have not signed up yet, use `gridlabd {os.path.basename(sys.argv[0]).replace('.py','')} --signup=` to sign up",1) + key = getkey(email) + addkey(value) + if not value: + print(f"key for {email} deleted, use `gridlabd {os.path.basename(sys.argv[0]).replace('.py','')} --apikey={key}` to restore it") + elif token == "--whoami": + if not getemail(): + error(f"you have not signed up yet, use `gridlabd {os.path.basename(sys.argv[0]).replace('.py','')} --signup=` to sign up",1) + print(email,file=sys.stdout) + elif token in ["-v","--verbose"]: + verbose_enable = not verbose_enable + elif token in ["-e","--encode"]: + position = value.split(",") + if len(position) != 2: + error("position is not a tuple",1) + print(geohash(float(position[0]),float(position[1])),file=sys.stdout) + else: + error(f"option '{token}' is not valid",1) + if position and year: + data = getyears(year,float(position[0]),float(position[1])) + writeglm(data,glm,name,csv) + +