From aa0f74f7bea8fa33f51189df1865614e3d99778b Mon Sep 17 00:00:00 2001
From: Matthew Taylor
Date: Thu, 11 May 2017 12:37:34 -0700
Subject: [PATCH] Removes unused code from nupic.support (#3616)

* Cleaned out nupic.support.__init__

* Removed unused code in nupic.support
---
 docs/README.md                   |   3 -
 src/nupic/support/__init__.py    | 214 -------------------------------
 src/nupic/support/datafiles.py   | 205 -----------------------------
 src/nupic/support/log_utils.py   |  39 ------
 src/nupic/support/loophelpers.py |  90 -------------
 5 files changed, 551 deletions(-)
 delete mode 100644 src/nupic/support/datafiles.py
 delete mode 100644 src/nupic/support/log_utils.py
 delete mode 100644 src/nupic/support/loophelpers.py

diff --git a/docs/README.md b/docs/README.md
index cd72f231c5..6ac524796f 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -180,7 +180,6 @@ nupic
 │   ├── configuration_base.py [TODO]
 │   ├── configuration_custom.py [TODO]
 │   ├── consoleprinter.py [TODO]
-│   ├── datafiles.py [TODO]
 │   ├── decorators.py [TODO]
 │   ├── enum.py [TODO]
 │   ├── exceptions.py [TODO]
@@ -190,8 +189,6 @@ nupic
 │   ├── fshelpers.py [TODO]
 │   ├── group_by.py [TODO]
 │   ├── lockattributes.py [TODO]
-│   ├── log_utils.py [TODO]
-│   ├── loophelpers.py [TODO]
 │   └── mysqlhelpers.py [TODO]
 ├── swarming
 │   ├── DummyModelRunner.py [TODO]
diff --git a/src/nupic/support/__init__.py b/src/nupic/support/__init__.py
index 278186bb32..4b7c1330a8 100644
--- a/src/nupic/support/__init__.py
+++ b/src/nupic/support/__init__.py
@@ -159,7 +159,6 @@ def foo():
   callable_name, file_name, class_name = getCallerInfo(2)
   s = callable_name
   if class_name is not None:
-    method_name = s
     s = class_name + '.' + callable_name
   lines = (s + additional).split('\n')
   length = max(len(line) for line in lines)
@@ -169,72 +168,6 @@ def foo():
 
 
 
-def bringToFront(title):
-  """Bring a top-level window with a given title
-  to the front on Windows"""
-  if sys.platform != 'win32':
-    return
-
-  import ctypes
-  find_window = ctypes.windll.user32.FindWindowA
-  set_foreground_window = ctypes.windll.user32.SetForegroundWindow
-  hwnd = find_window(None, title)
-  if hwnd == 0:
-    raise Exception('There is no window titled: "%s"' % title)
-  set_foreground_window(hwnd)
-
-
-
-def getUserDocumentsPath():
-  """
-  Find the user's "Documents" directory (OS X), "My Documents" directory
-  (Windows), or home directory (Unix).
-  """
-
-  # OS X and Windows code from:
-  # http://www.blueskyonmars.com/2005/08/05
-  # /finding-a-users-my-documents-folder-on-windows/
-  # Alternate Windows code from:
-  # http://bugs.python.org/issue1763
-  if sys.platform.startswith('win'):
-    if sys.platform.startswith('win32'):
-      # Try the primary method on 32-bit windows
-      try:
-        from win32com.shell import shell
-        alt = False
-      except ImportError:
-        try:
-          import ctypes
-          dll = ctypes.windll.shell32
-          alt = True
-        except:
-          raise Exception("Could not find 'My Documents'")
-    else:
-      # Use the alternate method on 64-bit Windows
-      alt = True
-    if not alt:
-      # Primary method using win32com
-      df = shell.SHGetDesktopFolder()
-      pidl = df.ParseDisplayName(0, None,
-          "::{450d8fba-ad25-11d0-98a8-0800361b1103}")[1]
-      path = shell.SHGetPathFromIDList(pidl)
-    else:
-      # Alternate method using ctypes rather than win32com
-      buf = ctypes.create_string_buffer(300)
-      dll.SHGetSpecialFolderPathA(None, buf, 0x0005, False)
-      path = buf.value
-  elif sys.platform.startswith('darwin'):
-    from Carbon import Folder, Folders
-    folderref = Folder.FSFindFolder(Folders.kUserDomain,
-                                    Folders.kDocumentsFolderType,
-                                    False)
-    path = folderref.as_pathname()
-  else:
-    path = os.getenv('HOME')
-  return path
-
-
-
 def getArgumentDescriptions(f):
   """
   Get the arguments, default values, and argument descriptions for a function.
@@ -329,23 +262,6 @@ def getArgumentDescriptions(f):
 
 
 
-# TODO queryNumInwardIters appears to be unused and should probably be deleted
-# from here altogether; it's likely an artifact of the legacy vision support.
-#def queryNumInwardIters(configPath, radialLength, numRepetitions=1):
-#  """
-#  Public utility API that accepts a config path and
-#  radial length, and determines the proper number of
-#  training iterations with which to invoke net.run()
-#  when running a PictureSensor in 'inward' mode.
-#  """
-#  numCats = queryNumCategories(configPath)
-#  sequenceLen = radialLength + 1
-#  numItersPerCat = (8 * radialLength) * sequenceLen
-#  numTrainingItersTP = numItersPerCat * numCats
-#  return numTrainingItersTP * numRepetitions
-
-
-
 gLoggingInitialized = False
 def initLogging(verbose=False, console='stdout', consoleLevel='DEBUG'):
   """
@@ -491,20 +407,6 @@ def makeKey(name):
 
 
 
-def reinitLoggingDir():
-  """ (Re-)Initialize the loging directory for the calling application that
-  uses initLogging() for logging configuration
-
-  NOTE: It's typially unnecessary to call this function directly since
-  initLogging takes care of it for you. This function is exposed primarily for
-  the benefit of nupic-services.py to allow it to restore its logging directory
-  after the hard-reset operation.
-  """
-  if gLoggingInitialized and 'NTA_LOG_DIR' in os.environ:
-    makeDirectoryFromAbsolutePath(os.path.dirname(_genLoggingFilePath()))
-
-
-
 def _genLoggingFilePath():
   """ Generate a filepath for the calling app """
   appName = os.path.splitext(os.path.basename(sys.argv[0]))[0] or 'UnknownApp'
@@ -518,122 +420,6 @@ def _genLoggingFilePath():
 
 
 
-def enableLoggingErrorDebugging():
-  """ Overrides the python logging facility's Handler.handleError function to
-  raise an exception instead of print and suppressing it. This allows a deeper
-  stacktrace to be emitted that is very helpful for quickly finding the
-  file/line that initiated the invalidly-formatted logging operation.
-
-  NOTE: This is for debugging only - be sure to remove the call to this function
-  *before* checking in your changes to the source code repository, as it will
-  cause the application to fail if some invalidly-formatted logging statement
-  still exists in your code.
-
-  Example usage: enableLoggingErrorDebugging must be called *after*
-  initLogging()
-
-    import nupic.support
-    nupic.support.initLogging()
-    nupic.support.enableLoggingErrorDebugging()
-
-  "TypeError: not all arguments converted during string formatting" is an
-  example exception that might be output by the built-in handlers with the
-  following very shallow traceback that doesn't go deep enough to show the
-  source of the problem:
-
-    File ".../python2.6/logging/__init__.py", line 776, in emit
-      msg = self.format(record)
-    File ".../python2.6/logging/__init__.py", line 654, in format
-      return fmt.format(record)
-    File ".../python2.6/logging/__init__.py", line 436, in format
-      record.message = record.getMessage()
-    File ".../python2.6/logging/__init__.py", line 306, in getMessage
-      msg = msg % self.args
-    TypeError: not all arguments converted during string formatting
-  """
-
-  print >> sys.stderr, ("WARNING")
-  print >> sys.stderr, ("WARNING: "
-    "nupic.support.enableLoggingErrorDebugging() was "
-    "called to install a debugging patch into all logging handlers that "
-    "will cause the program to fail if a logging exception occurrs; this "
-    "call is for debugging only and MUST be removed before checking in code "
-    "into production system. Caller: %s") % (
-    traceback.format_stack(),)
-  print >> sys.stderr, ("WARNING")
-
-  def handleErrorPatch(*args, **kwargs):
-    if logging.raiseExceptions:
-      raise
-
-  for handler in logging._handlerList:
-    handler.handleError = handleErrorPatch
-
-  return
-
-
-
-def intTo8ByteArray(inValue):
-  """
-  Converts an int to a packed byte array, with left most significant byte
-  """
-
-  values = (
-    (inValue >> 56 ) & 0xff,
-    (inValue >> 48 ) & 0xff,
-    (inValue >> 40 ) & 0xff,
-    (inValue >> 32 ) & 0xff,
-    (inValue >> 24 ) & 0xff,
-    (inValue >> 16 ) & 0xff,
-    (inValue >> 8 ) & 0xff,
-    inValue & 0xff
-  )
-
-  s = struct.Struct('B B B B B B B B')
-  packed_data = s.pack(*values)
-
-  return packed_data
-
-
-
-def byteArrayToInt(packed_data):
-  """
-  Converts a byte array into an integer
-  """
-  value = struct.unpack('B B B B B B B B', packed_data)
-  return value[0] << 56 | \
-         value[1] << 48 | \
-         value[2] << 40 | \
-         value[3] << 32 | \
-         value[4] << 24 | \
-         value[5] << 16 | \
-         value[6] << 8 | \
-         value[7]
-
-
-
-def getSpecialRowID():
-  """
-  Special row id is 0xFF FFFF FFFF FFFF FFFF (9 bytes of 0xFF)
-  """
-  values = (0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF)
-  s = struct.Struct('B B B B B B B B B')
-  packed_data = s.pack(*values)
-
-  return packed_data
-
-
-
-_FLOAT_SECONDS_IN_A_DAY = 24.0 * 60.0 * 60.0
-def floatSecondsFromTimedelta(td):
-  """ Convert datetime.timedelta to seconds in floating point """
-  sec = (td.days * _FLOAT_SECONDS_IN_A_DAY + td.seconds * 1.0 +
-         td.microseconds / 1E6)
-
-  return sec
-
-
-
 def aggregationToMonthsSeconds(interval):
   """
   Return the number of months and seconds from an aggregation dict that
diff --git a/src/nupic/support/datafiles.py b/src/nupic/support/datafiles.py
deleted file mode 100644
index ff24b6cdfa..0000000000
--- a/src/nupic/support/datafiles.py
+++ /dev/null
@@ -1,205 +0,0 @@
-# ----------------------------------------------------------------------
-# Numenta Platform for Intelligent Computing (NuPIC)
-# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
-# with Numenta, Inc., for a separate license for this software code, the
-# following terms and conditions apply:
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero Public License version 3 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU Affero Public License for more details.
-#
-# You should have received a copy of the GNU Affero Public License
-# along with this program. If not, see http://www.gnu.org/licenses.
-#
-# http://numenta.org/licenses/
-# ----------------------------------------------------------------------
-
-
-# TODO for NUPIC 2 -- document the interface!
-# TODO for NuPIC 2 -- should this move to inferenceanalysis?
-
-
-
-
-def _calculateColumnsFromLine(line):
-  if "," in line:
-    splitLine = line.strip().split(",")
-    n = len(splitLine)
-    if n:
-      if not splitLine[-1].strip():
-        return n-1
-      else:
-        return n
-    else:
-      return 0
-  else:
-    # Too flexible.
-    # return len([x for x in line.strip().split() if x != ","])
-    return len(line.strip().split())
-
-def _isComment(strippedLine):
-  if strippedLine:
-    return strippedLine.startswith("#")
-  else:
-    return True
-
-def _calculateColumnsFromFile(f, format, rewind):
-  # Calculate the number of columns.
-  # We will put more trust in the second line that the first, in case the
-  # first line includes header entries.
-  if format not in [0, 2, 3]:
-    raise RuntimeError("Supported formats are 0, 2, and 3.")
-
-  if format == 0:
-    line0 = f.readline()
-    csplit = line0.split()
-    if len(csplit) != 1:
-      raise RuntimeError("Expected first line of data file to "
-        "contain a single number of columns. "
-        " Found %d fields" % len(csplit))
-    try:
-      numColumns = int(csplit[0])
-    except:
-      raise RuntimeError("Expected first line of data file to "
-        "contain a single number of columns. Found '%s'" % csplit[0])
-    if rewind:
-      f.seek(0)
-
-    return numColumns
-
-  elif format == 2:
-    numColumns = 0
-    numLinesRead = 0
-    for line in f:
-      strippedLine = line.strip()
-      if not _isComment(strippedLine):
-        curColumns = _calculateColumnsFromLine(strippedLine)
-        numLinesRead += 1
-        if numColumns and (numColumns != curColumns):
-          raise RuntimeError("Different lines have different "
-            "numbers of columns.")
-        else:
-          numColumns = curColumns
-        if numLinesRead > 1:
-          break
-    if rewind:
-      f.seek(0)
-    return numColumns
-
-  # CSV file: we'll just check the first line
-  elif format == 3:
-    strippedLine = f.readline().strip()
-    numColumns = calculateColumnsFromLine(strippedLine)
-    if rewind:
-      f.seek(0)
-    return numColumns
-
-def processCategoryFile(f, format, categoryColumn=None, categoryColumns=None, count=1):
-  """Read the data out of the given category file, returning a tuple
-  (categoryCount, listOfCategories)
-
-  @param f               A file-like object containing the category info.
-  @param format          The format of the category file. TODO: describe.
-  @param categoryColumn  If non-None, this is the column number (zero-based)
-                         where the category info starts in the file. If
-                         None, indicates that the file only contains category
-                         information (same as passing 0, but allows some
-                         extra sanity checking).
-  @param categoryColumns Indicates how many categories are active per
-                         timepoint (how many elements wide the category info
-                         is). If 0, we'll determine this from the file. If
-                         None (the default), means that the category info
-                         is 1 element wide, and that the list we return
-                         will just be a list of ints (rather than a list of
-                         lists)
-  @param count           Determines the size of chunks that will be aggregated
-                         into a single entry. The default is 1, so each entry
-                         from the file will be represented in the result. If
-                         count > 1 then 'count' categories (all identical) will
-                         be collapsed into a single entry. This is helpful for
-                         aggregating explorers like EyeMovements where multiple
-                         presentaions are conceptually the same item.
-  @return categoryCount  The number of categories (aka maxCat + 1)
-  @return allCategories  A list of the categories read in, with one item per
-                         time point. If 'categoryColumns' is None, each item
-                         will be an int. Otherwise, each item will be a list
-                         of ints. If count > 1 then the categories will be
-                         aggregated, so that each chunk of 'count' categories
-                         will result in only one entry (all categories in a chunk
-                         must be identical)
-  """
-  calculatedCategoryColumns = _calculateColumnsFromFile(f, format=format,
-                                      rewind=(format==2 or format==3))
-
-  # If the user passed categoryColumns as None, we'll return a list of ints
-  # directly; otherwise we'll return a list of lists...
-  wantListOfInts = (categoryColumns is None)
-
-  # Get arguments sanitized...
-  if categoryColumns == 0:
-    # User has told us to auto-calculate the # of categories / time point...
-
-    # If categoryColumn is not 0 or None, that's an error...
-    if categoryColumn:
-      raise RuntimeError("You can't specify an offset for category data "
-                         "if using automatic width.")
-
-    categoryColumn = 0
-    categoryColumns = calculatedCategoryColumns
-  elif categoryColumns is None:
-    # User has told us that there's just one category...
-
-    if categoryColumn is None:
-      if calculatedCategoryColumns != 1:
-        raise RuntimeError("Category file must contain exactly one column.")
-      categoryColumn = 0
-
-    categoryColumns = 1
-  else:
-    # User specified exactly how big the category data is...
-
-    if (categoryColumns + categoryColumn) > calculatedCategoryColumns:
-      raise RuntimeError("Not enough categories in file")
-
-  maxCategory = 0
-
-  allCategories = []
-  for line in f:
-    strippedLine = line.strip()
-    if not _isComment(strippedLine):
-      if wantListOfInts:
-        category = int(strippedLine.split()[categoryColumn])
-        allCategories.append(category)
-        maxCategory = max(maxCategory, category)
-      else:
-        categories = strippedLine.split()[categoryColumn:
-                                          categoryColumn+categoryColumns]
-        categories = map(int, categories)
-        allCategories.append(categories)
-        maxCategory = max(maxCategory, max(categories))
-
-  categoryCount = maxCategory + 1
-
-  # Aggregate categories
-  result = []
-  if count > 1:
-    # Make sure there the number of categories can be aggregated
-    # exactly by chunks of size 'count'
-    assert len(allCategories) % count == 0
-    start = 0
-    for i in range(len(allCategories) / count):
-      end = start + count
-      # Make sure each chunk of size 'count' contains exactly one category
-      assert (min(allCategories[start:end]) == max(allCategories[start:end]))
-      # Add just one entry for each chunk
-      result.append(allCategories[start])
-      start = end
-  else:
-    result = allCategories
-
-  return categoryCount, result
diff --git a/src/nupic/support/log_utils.py b/src/nupic/support/log_utils.py
deleted file mode 100644
index 904a1cce9f..0000000000
--- a/src/nupic/support/log_utils.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# ----------------------------------------------------------------------
-# Numenta Platform for Intelligent Computing (NuPIC)
-# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
-# with Numenta, Inc., for a separate license for this software code, the
-# following terms and conditions apply:
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero Public License version 3 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU Affero Public License for more details.
-#
-# You should have received a copy of the GNU Affero Public License
-# along with this program. If not, see http://www.gnu.org/licenses.
-#
-# http://numenta.org/licenses/
-# ----------------------------------------------------------------------
-
-# This file contains utility functions that are used
-# internally by the prediction framework. It should not be
-# imported by description files. (see helpers.py)
-
-
-import logging
-import inspect
-
-def createLogger(obj):
-  """Helper function to create a logger object for the current object with
-  the standard Numenta prefix """
-  if inspect.isclass(obj):
-    myClass = obj
-  else:
-    myClass = obj.__class__
-  logger = logging.getLogger(".".join(
-      ['com.numenta', myClass.__module__, myClass.__name__]))
-  return logger
diff --git a/src/nupic/support/loophelpers.py b/src/nupic/support/loophelpers.py
deleted file mode 100644
index 9c45f28c3e..0000000000
--- a/src/nupic/support/loophelpers.py
+++ /dev/null
@@ -1,90 +0,0 @@
-# ----------------------------------------------------------------------
-# Numenta Platform for Intelligent Computing (NuPIC)
-# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
-# with Numenta, Inc., for a separate license for this software code, the
-# following terms and conditions apply:
-#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero Public License version 3 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# See the GNU Affero Public License for more details.
-#
-# You should have received a copy of the GNU Affero Public License
-# along with this program. If not, see http://www.gnu.org/licenses.
-#
-# http://numenta.org/licenses/
-# ----------------------------------------------------------------------
-
-'''
-A fault tolerant loop
-'''
-import time
-import smtplib
-import sys
-
-from datetime import datetime, timedelta
-from email.mime.text import MIMEText
-
-
-
-def patientLoop(logger, maxWaitExponent, finalErrorString, acceptableError,
-                acceptErrorString, callable, argumentList, email = False,
-                noReturn = False):
-  '''
-  logger            Logging object
-  maxWaitExponent   2 ** maxWaitExponent defines max wait time
-  finalErrorString  Msg to log if wait time expires
-  acceptableError   The expected error for the loop
-  acceptErrorString Msg to log when expected error is encountered
-  callable          The method/function to attempt in the loop
-  arguments         Arguments to the method/function
-  email             An address to send a failure message to.
-  '''
-
-  exponent = 0
-  lastErrorTime = datetime(1970,1,1)
-  # Exit only on success (return) or exceed wait time (sys.exit(1))
-  while True:
-    if exponent > maxWaitExponent:
-      logger.error(finalErrorString)
-      if email:
-        msg = MIMEText(logger.name)
-        msg['Subject'] = 'URGENT - %s' % finalErrorString
-        me = 'patientloop@numenta.com'
-        you = email
-        msg['From'] = me
-        msg['To'] = you
-        s = smtplib.SMTP('localhost')
-        s.sendmail(me, [you], msg.as_string())
-        s.quit()
-      sys.exit(1)
-    timeout = 2 ** exponent
-    try:
-      if noReturn:
-        callable(*argumentList)
-        return
-      else:
-        rv = callable(*argumentList)
-        return rv
-    except acceptableError:
-      logger.warn(acceptErrorString)
-      # Store the time we encountered this error
-      errorTime = datetime.utcnow()
-      '''
-      If the time between this error and the last one is larger than the max
-      time we are willing to time out, then we know there was a re-connection
-      in between the errors. Reset our counter
-      '''
-      if errorTime - lastErrorTime > timedelta(seconds=(2**maxWaitExponent)):
-        exponent = 0
-      logger.info('Sleeping for %d seconds before attempting again ...' \
-                  % timeout )
-      time.sleep(timeout)
-      # Store our error time for checking next time around
-      lastErrorTime = errorTime
-      # Back off on the wait time
-      exponent += 1