Merge pull request #333 from richardhsu/flake8fixes

Fixing Flake8 Issues

RiteshMaheshwari committed Apr 23, 2015
2 parents 7fb6e3f + be7daeb commit 75c5df2
Showing 62 changed files with 1,330 additions and 789 deletions.
2 changes: 1 addition & 1 deletion MANIFEST.in
@@ -1 +1 @@
global-include *.html *.js *.css VERSION requirements.txt
global-include *.html *.js *.css VERSION requirements.txt
16 changes: 12 additions & 4 deletions NOTICE
@@ -1,8 +1,16 @@
© 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
Copyright 2013 LinkedIn Corp. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

List of other open-source software used/depended on by Naarad:

10 changes: 8 additions & 2 deletions bin/naarad
@@ -1,15 +1,21 @@
#!/usr/bin/env python
# coding=utf-8
"""
© 2013 LinkedIn Corp. All rights reserved.
Copyright 2013 LinkedIn Corp. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import logging
import os
import sys
3 changes: 2 additions & 1 deletion lib/luminol/demo/src/rca.py
@@ -79,7 +79,8 @@ def _analyze(self):
# Correlate with other metrics
for entry in self.related_metrices:
try:
entry_correlation_result = Correlator(self.metrix, entry, time_period=(extended_start_t, extended_end_t), use_anomaly_score=True).get_correlation_result()
entry_correlation_result = Correlator(self.metrix, entry, time_period=(extended_start_t, extended_end_t),
use_anomaly_score=True).get_correlation_result()
record = extended_start_t, extended_end_t, entry_correlation_result.__dict__, entry
record_by_name = extended_start_t, extended_end_t, entry_correlation_result.__dict__
output[t].append(record)
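For context on the call being re-wrapped above: Correlator comes from the luminol library bundled in this repository (lib/luminol). A minimal usage sketch, assuming the luminol.correlator import path and dict-style time series input; the data values below are made up:

from luminol.correlator import Correlator

# Two toy series keyed by epoch timestamps (illustrative values only).
ts_a = {1: 0.5, 2: 0.7, 3: 3.2, 4: 0.6, 5: 0.5}
ts_b = {1: 0.4, 2: 0.8, 3: 2.9, 4: 0.7, 5: 0.4}

# get_correlation_result() returns a CorrelationResult carrying shift,
# coefficient and shifted_coefficient (see the correlation_result.py hunk below).
result = Correlator(ts_a, ts_b).get_correlation_result()
print(result.coefficient, result.shift)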
15 changes: 11 additions & 4 deletions lib/luminol/demo/src/start.py
Expand Up @@ -17,10 +17,12 @@
DATA_PATH = 'static/data/'
SCORE_FILE_PATH = 'static/'


@app.route('/')
def index():
return render_template('index.html')


@app.route('/get_selection')
def get_selection():
fs = list()
@@ -29,6 +31,7 @@ def get_selection():
fs.append(f)
return jsonify(selection=fs)


@app.route('/detect')
def luminoldetect():
ts = urllib.unquote(request.args.get('ts_path')[1:])
@@ -50,7 +53,8 @@ def luminoldetect():
for key in anom_dict:
entry.append(anom_dict[key])
result.append(entry)
return jsonify(anom = result, anom_score = anom_scores)
return jsonify(anom=result, anom_score=anom_scores)


@app.route('/correlate')
def luminolanalyze():
@@ -64,13 +68,14 @@ def luminolanalyze():
result = myluminol.output_by_name
return jsonify(anom=result)


@app.route('/find_correlation_list')
def findCorrelationListPerAnomaly():
ts = urllib.unquote(request.args.get('ts')[1:])
all_ts = os.listdir(DATA_PATH)
matrices = list()
for t in all_ts:
t = DATA_PATH+t
t = DATA_PATH + t
if t.endswith('.csv') and t != ts:
matrices.append(t)
myluminol = RCA(ts, matrices)
@@ -80,16 +85,18 @@ def findCorrelationListPerAnomaly():
l = result[t]
data = list()
for entry in l:
data.append([entry[3]]+entry[2].values())
data.append([entry[3]] + entry[2].values())
data_sorted = sorted(data, key=lambda k: (-k[1], k[2], -k[3]))
r.append([t, data_sorted])
return jsonify(anom=r)


def write_csv(rows, name):
with open(SCORE_FILE_PATH + name, 'w+') as fp:
writer = csv.writer(fp)
writer.writerows(rows)


def to_epoch(anom):
r = list()
for a in anom:
@@ -101,4 +108,4 @@ def to_epoch(anom):

if __name__ == "__main__":
app.debug = True
app.run(host='0.0.0.0')
app.run(host='0.0.0.0')
5 changes: 3 additions & 2 deletions lib/luminol/setup.py
@@ -14,8 +14,9 @@
author='Naarad Developers',
author_email='[email protected]',
version=luminol_version,
packages=['luminol', 'luminol.algorithms', 'luminol.modules', 'luminol.algorithms.anomaly_detector_algorithms', 'luminol.algorithms.correlator_algorithms'],
package_dir={ '' : 'src'},
packages=['luminol', 'luminol.algorithms', 'luminol.modules', 'luminol.algorithms.anomaly_detector_algorithms',
'luminol.algorithms.correlator_algorithms'],
package_dir={'': 'src'},
install_requires=required,
license='Apache 2.0',
)
2 changes: 1 addition & 1 deletion lib/luminol/src/luminol/__init__.py
@@ -47,4 +47,4 @@ def get_root_causes(self):
Get root causes.
:return dict: a dict represents root causes for each anomaly.
"""
return getattr(self, 'causes', None)
return getattr(self, 'causes', None)
(Next changed file: the AbsoluteThreshold detector module; path not captured in this view.)
@@ -15,6 +15,7 @@
from luminol.constants import *
from luminol.modules.time_series import TimeSeries


class AbsoluteThreshold(AnomalyDetectorAlgorithm):
"""
Anomalies are those data points that are above a pre-specified threshold value.
@@ -52,4 +53,3 @@ def _set_scores(self):
if self.absolute_threshold_value_lower and value < self.absolute_threshold_value_lower:
anom_scores[timestamp] = self.absolute_threshold_value_lower - value
self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))
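The hunk above shows only the lower-bound branch of AbsoluteThreshold. Restated as a standalone sketch, with the upper-bound branch assumed from the class docstring and parameter naming:

def absolute_threshold_score(value, upper=None, lower=None):
    # Score is the distance outside the [lower, upper] band; inside the band it is zero.
    if upper is not None and value > upper:
        return value - upper
    if lower is not None and value < lower:
        return lower - value
    return 0.0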

(Next changed file: the anomaly detector algorithm registry; path not captured in this view.)
@@ -12,10 +12,10 @@
from luminol.algorithms.anomaly_detector_algorithms import *

anomaly_detector_algorithms = {
'bitmap_detector': bitmap_detector.BitmapDetector,
'default_detector': default_detector.DefaultDetector,
'derivative_detector': derivative_detector.DerivativeDetector,
'exp_avg_detector': exp_avg_detector.ExpAvgDetector,
'absolute_threshold': absolute_threshold.AbsoluteThreshold,
'diff_percent_threshold': diff_percent_threshold.DiffPercentThreshold
}
'bitmap_detector': bitmap_detector.BitmapDetector,
'default_detector': default_detector.DefaultDetector,
'derivative_detector': derivative_detector.DerivativeDetector,
'exp_avg_detector': exp_avg_detector.ExpAvgDetector,
'absolute_threshold': absolute_threshold.AbsoluteThreshold,
'diff_percent_threshold': diff_percent_threshold.DiffPercentThreshold
}
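This dict is the registry that maps an algorithm name string to its detector class. A minimal sketch of how it is typically reached, assuming luminol's AnomalyDetector front end accepts an algorithm_name keyword and exposes get_anomalies(); the series values are made up:

from luminol.anomaly_detector import AnomalyDetector

ts = {1: 1.0, 2: 1.1, 3: 9.7, 4: 1.0, 5: 0.9}  # illustrative series

# 'exp_avg_detector' is looked up in the registry shown above.
detector = AnomalyDetector(ts, algorithm_name='exp_avg_detector')
for anomaly in detector.get_anomalies():
    print(anomaly)  # formatted by Anomaly.__str__, see the anomaly.py hunk below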
(Next changed file: the bitmap detector module; path not captured in this view.)
@@ -137,7 +137,7 @@ def _construct_all_SAX_chunk_dict(self):

else:
# Just enter valid range.
if lag_dicts[i-1] is None:
if lag_dicts[i - 1] is None:
lag_dict = self._construct_SAX_chunk_dict(self.sax[i - lws: i])
lag_dicts[i] = lag_dict
lw_leave_chunk = self.sax[0:chunk_size]
@@ -150,12 +150,12 @@

else:
# Update dicts according to leave_chunks and enter_chunks.
lag_dict = copy(lag_dicts[i-1])
lag_dict = copy(lag_dicts[i - 1])
lag_dict[lw_leave_chunk] -= 1
lag_dict[lw_enter_chunk] +=1
lag_dict[lw_enter_chunk] += 1
lag_dicts[i] = lag_dict

fut_dict = copy(fut_dicts[i-1])
fut_dict = copy(fut_dicts[i - 1])
fut_dict[fw_leave_chunk] -= 1
fut_dict[fw_enter_chunk] += 1
fut_dicts[i] = fut_dict
(Next changed file: the default detector module; path not captured in this view.)
@@ -40,9 +40,9 @@ def _set_scores(self):
anom_scores = {}
for timestamp in anom_scores_ema.timestamps:
# Compute a weighted anomaly score.
anom_scores[timestamp] = max(anom_scores_ema[timestamp], anom_scores_ema[timestamp] * DEFAULT_DETECTOR_EMA_WEIGHT \
+ anom_scores_deri[timestamp] * (1 - DEFAULT_DETECTOR_EMA_WEIGHT))
anom_scores[timestamp] = max(anom_scores_ema[timestamp],
anom_scores_ema[timestamp] * DEFAULT_DETECTOR_EMA_WEIGHT + anom_scores_deri[timestamp] * (1 - DEFAULT_DETECTOR_EMA_WEIGHT))
# If ema score is significant enough, take the bigger one of the weighted score and deri score.
if anom_scores_ema[timestamp] > DEFAULT_DETECTOR_EMA_SIGNIFICANT:
anom_scores[timestamp] = max(anom_scores[timestamp], anom_scores_deri[timestamp])
self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))
self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))
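The re-wrapped expression above is easier to read as a standalone function. A sketch of the same blending rule, with stand-in constants (the real DEFAULT_DETECTOR_EMA_WEIGHT and DEFAULT_DETECTOR_EMA_SIGNIFICANT live in luminol/constants.py; the numbers here are assumed for illustration only):

EMA_WEIGHT = 0.65        # stand-in for DEFAULT_DETECTOR_EMA_WEIGHT
EMA_SIGNIFICANT = 0.94   # stand-in for DEFAULT_DETECTOR_EMA_SIGNIFICANT

def blend_scores(ema_score, deri_score):
    # Weighted mix of the two detectors, never lower than the raw EMA score.
    score = max(ema_score, ema_score * EMA_WEIGHT + deri_score * (1 - EMA_WEIGHT))
    # If the EMA score alone is significant, let the derivative score win as well.
    if ema_score > EMA_SIGNIFICANT:
        score = max(score, deri_score)
    return score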
(Next changed file: the DiffPercentThreshold detector module; path not captured in this view.)
@@ -15,6 +15,7 @@
from luminol.constants import *
from luminol.modules.time_series import TimeSeries


class DiffPercentThreshold(AnomalyDetectorAlgorithm):
"""
In this algorithm, anomalies are those data points that are above a percentage threshold as compared to the baseline.
@@ -35,7 +36,8 @@ def __init__(self, time_series, baseline_time_series, percent_threshold_upper=No
self.percent_threshold_upper = percent_threshold_upper
self.percent_threshold_lower = percent_threshold_lower
if not self.percent_threshold_upper and not self.percent_threshold_lower:
raise exceptions.RequiredParametersNotPassed('luminol.algorithms.anomaly_detector_algorithms.diff_percent_threshold: Either percent_threshold_upper or percent_threshold_lower needed')
raise exceptions.RequiredParametersNotPassed('luminol.algorithms.anomaly_detector_algorithms.diff_percent_threshold: \
Either percent_threshold_upper or percent_threshold_lower needed')

def _set_scores(self):
"""
@@ -60,4 +62,4 @@ def _set_scores(self):
if self.percent_threshold_lower and diff_percent < 0 and diff_percent < self.percent_threshold_lower:
anom_scores[timestamp] = -1 * diff_percent

self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))
self.anom_scores = TimeSeries(self._denoise_scores(anom_scores))
(Next changed file: the exponential moving average detector module; path not captured in this view.)
@@ -87,4 +87,4 @@ def _set_scores(self):
"""
if self.use_lag_window:
self._compute_anom_data_using_window()
self._compute_anom_data_decay_all()
self._compute_anom_data_decay_all()
(Next changed file: the correlator algorithm base class; path not captured in this view.)
@@ -49,4 +49,4 @@ def run(self):
:return CorrelationResult: a CorrelationResult object represents the correlation result.
"""
self._detect_correlation()
return self.correlation_result
return self.correlation_result
(Next changed file: the correlator algorithm registry; path not captured in this view.)
@@ -12,5 +12,5 @@
from luminol.algorithms.correlator_algorithms import *

correlator_algorithms = {
'cross_correlator': cross_correlator.CrossCorrelator
'cross_correlator': cross_correlator.CrossCorrelator
}
34 changes: 17 additions & 17 deletions lib/luminol/src/luminol/constants.py
@@ -50,8 +50,8 @@
DEFAULT_DERI_SMOOTHING_FACTOR = 0.2

ANOMALY_THRESHOLD = {
'exp_avg_detector': 3,
'default_detector': 3
'exp_avg_detector': 3,
'default_detector': 3
}

# Percentage threshold on anomaly score below which is considered noises.
@@ -80,18 +80,18 @@
DEFAULT_SHIFT_IMPACT = 0.05

TIMESTAMP_STR_FORMATS = [
'%Y%m%d_%H:%M:%S',
'%Y-%m-%d %H:%M:%S.%f',
'%Y%m%d %H:%M:%S',
'%Y-%m-%d_%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%f',
'%H:%M:%S.%f',
'%Y-%m-%dT%H:%M:%S.%f%z',
'%Y%m%dT%H:%M:%S',
'%Y-%m-%d_%H:%M:%S.%f',
'%Y%m%d_%H:%M:%S.%f',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%Y%m%dT%H:%M:%S.%f',
'%H:%M:%S',
'%Y%m%d %H:%M:%S.%f']
'%Y%m%d_%H:%M:%S',
'%Y-%m-%d %H:%M:%S.%f',
'%Y%m%d %H:%M:%S',
'%Y-%m-%d_%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%f',
'%H:%M:%S.%f',
'%Y-%m-%dT%H:%M:%S.%f%z',
'%Y%m%dT%H:%M:%S',
'%Y-%m-%d_%H:%M:%S.%f',
'%Y%m%d_%H:%M:%S.%f',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%d %H:%M:%S',
'%Y%m%dT%H:%M:%S.%f',
'%H:%M:%S',
'%Y%m%d %H:%M:%S.%f']
2 changes: 1 addition & 1 deletion lib/luminol/src/luminol/correlator.py
@@ -113,4 +113,4 @@ def is_correlated(self, threshold=None):
Compare with a threshold to determine whether two timeseries correlate to each other.
:return: a CorrelationResult object if two time series correlate otherwise false.
"""
return self.correlation_result if self.correlation_result.coefficient >= threshold else False
return self.correlation_result if self.correlation_result.coefficient >= threshold else False
2 changes: 1 addition & 1 deletion lib/luminol/src/luminol/modules/anomaly.py
@@ -41,4 +41,4 @@ def __str__(self):
return string representation of the anomaly
:return: string
"""
return "Anomaly from {0} to {1} with score {2}".format(self.start_timestamp, self.end_timestamp, self.anomaly_score)
return "Anomaly from {0} to {1} with score {2}".format(self.start_timestamp, self.end_timestamp, self.anomaly_score)
2 changes: 1 addition & 1 deletion lib/luminol/src/luminol/modules/correlation_result.py
@@ -25,4 +25,4 @@ def __init__(self, shift, coefficient, shifted_coefficient):
"""
self.shift = shift
self.coefficient = coefficient
self.shifted_coefficient = shifted_coefficient
self.shifted_coefficient = shifted_coefficient
2 changes: 1 addition & 1 deletion lib/luminol/src/luminol/modules/time_series.py
@@ -371,4 +371,4 @@ def sum(self, default=None):
:param default: Value to return as a default should the calculation not be possible.
:return: Float representing the sum or `None`.
"""
return numpy.asscalar(numpy.sum(self.values)) if self.values else default
return numpy.asscalar(numpy.sum(self.values)) if self.values else default
4 changes: 2 additions & 2 deletions lib/luminol/src/luminol/tests/run_tests.py
@@ -96,8 +96,8 @@ class TestLuminol(unittest.TestCase):
def setUp(self):
self.anomaly = ['A', 'B']
self.correlation = {
'A': ['m1', 'm2', 'm3'],
'B': ['m2', 'm1', 'm3']
'A': ['m1', 'm2', 'm3'],
'B': ['m2', 'm1', 'm3']
}
self.luminol = Luminol(self.anomaly, self.correlation)

7 changes: 5 additions & 2 deletions lib/luminol/src/luminol/utils.py
@@ -20,6 +20,7 @@

from luminol import constants, exceptions


def compute_ema(smoothing_factor, points):
"""
Compute exponential moving average of a list of points.
@@ -35,6 +36,7 @@ def compute_ema(smoothing_factor, points):
ema.append(smoothing_factor * points[i] + (1 - smoothing_factor) * ema[i - 1])
return ema


def read_csv(csv_name):
"""
Read data from a csv file into a dictionary.
@@ -55,6 +57,7 @@ def read_csv(csv_name):
pass
return data


def to_epoch(t_str):
"""
Covert a timestamp string to an epoch number.
@@ -68,7 +71,7 @@ def to_epoch(t_str):
for format in constants.TIMESTAMP_STR_FORMATS:
try:
t = datetime.datetime.strptime(t_str, format)
return float(time.mktime(t.utctimetuple())*1000.0 + t.microsecond/1000.0)
return float(time.mktime(t.utctimetuple()) * 1000.0 + t.microsecond / 1000.0)
except:
pass
raise exceptions.InvalidDataFormat
raise exceptions.InvalidDataFormat
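to_epoch tries every pattern in constants.TIMESTAMP_STR_FORMATS (see the constants.py hunk above) and returns the timestamp as epoch milliseconds. A minimal usage sketch, assuming the package is installed so that luminol.utils is importable:

from luminol import utils

# Both of these formats appear in TIMESTAMP_STR_FORMATS, so both strings parse;
# a string matching none of the formats raises exceptions.InvalidDataFormat.
print(utils.to_epoch('2015-04-23 10:30:00'))
print(utils.to_epoch('20150423_10:30:00.500'))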
3 changes: 2 additions & 1 deletion setup.cfg
@@ -1,3 +1,4 @@
[flake8]
ignore = E111
ignore = E111,E128
max-line-length = 160
exclude = MANIFEST.in,LICENSE,NOTICE,README.md,build,env,*requirements.txt,setup.cfg
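For reference, E111 is pycodestyle's "indentation is not a multiple of four" check and E128 is "continuation line under-indented for visual indent"; keeping both on the ignore list is presumably what permits the two-space indentation and the hanging-indent continuation style used in the wrapped calls above. With this block in setup.cfg, running flake8 (or python -m flake8) from the repository root should pick up the ignore list, the 160-character line limit, and the exclusions without any extra flags.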
(The remaining changed files in this commit were not loaded in this capture.)