Suppress pep8 warnings that are not fixed yet

The code has over 2000 warnings related to indentation and line length.
Suppressing these warnings allows us to fix them in manageable code chunks.

In addition to suppressing many of the warnings, this submission fixes
 W191 indentation contains tabs
 W291 trailing whitespace
 W292 no newline at end of file
 W293 blank line contains whitespace
 W391 blank line at end of file
 W503 line break before binary operator

Change-Id: If40f73d7312aec574d8141ea1af2cc6f1b4b8a46
This commit is contained in:
Al Bailey 2018-06-15 13:00:36 -05:00
parent 37f3cabe55
commit b5c61d006c
18 changed files with 481 additions and 457 deletions

View File

@ -135,4 +135,4 @@ if __name__ == '__main__':
timers.timers_finalize() timers.timers_finalize()
selobj.selobj_finalize() selobj.selobj_finalize()
debug.debug_finalize() debug.debug_finalize()

View File

@ -7,10 +7,10 @@
# #
################################################################################ ################################################################################
# #
# Description: Un-gzips nfv-vim log files within the same directory and generates # Description: Un-gzips nfv-vim log files within the same directory and generates
# CSVs based on the processes found related to histogram data, # CSVs based on the processes found related to histogram data,
# naming each CSV after the process found in the log files whose # naming each CSV after the process found in the log files whose
# log data it is storing. Each CSV file contains 3 columns of # log data it is storing. Each CSV file contains 3 columns of
# data: TIMESTAMP, average execution time, hits per sample # data: TIMESTAMP, average execution time, hits per sample
# This script is meant to be used with the plotter.py data visualization # This script is meant to be used with the plotter.py data visualization
# script. # script.
@ -27,9 +27,9 @@
# average execution time which is each execution time in the sample, # average execution time which is each execution time in the sample,
# multiplied by its respective number of hits during that sample, and # multiplied by its respective number of hits during that sample, and
# then all of these values summed together, divided by the sum comprising # then all of these values summed together, divided by the sum comprising
# the total number of hits for that particular sample. The third column # the total number of hits for that particular sample. The third column
# is simply the total number of hits for that same (across all execution # is simply the total number of hits for that same (across all execution
# times). # times).
# #
# Place this script in a directory containing the gzipped or ungzipped logs you would # Place this script in a directory containing the gzipped or ungzipped logs you would
# like to generate CSV files for. # like to generate CSV files for.
@ -57,77 +57,80 @@ call("cp nfv-vim.log nfv-vim.log.[0-9] nfv-vim.log.[0-9][0-9] nfv-vim.log.[0-9].
call("gunzip logs/nfv-vim.log.[0-9].gz logs/nfv-vim.log.[0-9][0-9].gz", shell=True) call("gunzip logs/nfv-vim.log.[0-9].gz logs/nfv-vim.log.[0-9][0-9].gz", shell=True)
class Parser:
    """Parses nfv-vim log files for histogram data and accumulates, per
    process, CSV rows of "timestamp,average-execution-time,total-hits,".
    """

    def __init__(self):
        self.proc = ""                 # Name of process being read
        self.timestamp = ""            # Timestamp found on line stating process name
        self.write = False             # Flag indicating data has yet to be written
        self.stored = False            # Flag indicating that there is new data stored
        self.length = 0                # Time duration of process
        self.instanceCount = 0         # Number of hits for the particular duration
        self.rollingCount = 0          # Sum of the hits for each duration parsed within the sample
        self.total = 0                 # Specific duration multiplied by number of hits for that duration
        self.avg = 0                   # Average execution time of process
        self.unit = ""                 # Unit execution time was recorded in
        self.csvs = defaultdict(list)  # Stores unique processes in a dict of lists

    # Resets variables when a new process begins to be read in logs.
    # Note: self.csvs is deliberately NOT cleared - it accumulates output.
    def reset(self):
        self.length = 0
        self.avg = 0
        self.instanceCount = 0
        self.rollingCount = 0
        self.total = 0
        self.proc = ""
        self.unit = ""
        self.write = False
        self.stored = False

    # Adds log data for a process to the csvs dictionary, then resets the
    # per-sample accumulators for the next process block.
    def add(self, proc, total, timestamp, rollingCount):
        if rollingCount != 0:
            avg = total / float(rollingCount)
        else:
            # Guard against division by zero when a sample had no hits.
            avg = 0
        self.csvs[proc].append(timestamp + "," + str(avg) + "," + str(rollingCount) + ",")
        self.reset()

    def main(self):
        """Read every nfv-vim log file and write one CSV per process found."""
        # Sorts the log files to read them in descending order
        sorted_files = glob.glob(logDir + "nfv-vim.log*")
        sorted_files.sort(reverse=True)
        for logFile in sorted_files:
            # The 'with' block closes the file; no explicit close() needed.
            with open(logFile, "r+") as f:
                cfgLines = f.read().splitlines()
                for line in cfgLines:
                    if "Histogram" in line:
                        # A new histogram header: flush the previous block first.
                        if self.write or self.stored:
                            self.add(self.proc,
                                     self.total,
                                     self.timestamp,
                                     self.rollingCount)
                        self.write = True
                        self.proc = line.partition("Histogram: ")[2]
                        self.proc = ("".join(self.proc.split())).rstrip(':')
                        self.timestamp = line.split()[0]
                    elif "histogram.py" in line:
                        # Data row: fixed-position fields (duration, unit, hits).
                        line = line.split()
                        self.length = int(line[8])
                        self.unit = line[9]
                        self.instanceCount = int(line[10])
                        # Normalize durations to milliseconds.
                        if "decisecond" in self.unit:
                            self.length *= 100
                        elif "secs" in self.unit:
                            self.length *= 1000
                        self.total = self.total + self.instanceCount * self.length
                        self.rollingCount += self.instanceCount
                        self.stored = True
        # Flush the final block after the last file.
        if self.write or self.stored:
            self.add(self.proc, self.total, self.timestamp, self.rollingCount)
        # Write one CSV file per process.
        for process in self.csvs:
            with open(os.path.join(csvDir, process + ".csv"), 'w+') as csvOut:
                for line in self.csvs[process]:
                    csvOut.write(line + "\n")
# Script entry point: parse all logs and emit one CSV per process.
process = Parser()
process.main()
# Parenthesized form prints identically under Python 2 and stays valid
# under Python 3 (the original bare print statement is py2-only syntax).
print("\nComplete\n")

View File

@ -11,11 +11,11 @@
# saves it locally. # saves it locally.
# #
# Behaviour : The script takes in arguments from the command line such as specific # Behaviour : The script takes in arguments from the command line such as specific
# process names, or the name of a grouping of processes, and graphs # process names, or the name of a grouping of processes, and graphs
# them in a local html file using plotly. The x-axis displays # them in a local html file using plotly. The x-axis displays
# datestamps corresponding to when the sample in the csv file was taken, # datestamps corresponding to when the sample in the csv file was taken,
# and the y-axis displays either the average execution time of the # and the y-axis displays either the average execution time of the
# processes during each sample, or the difference in total hits that # processes during each sample, or the difference in total hits that
# process experienced from one sample period to the previous sample # process experienced from one sample period to the previous sample
# period. Both average execution times and the delta hit count can # period. Both average execution times and the delta hit count can
# be displayed on the same graph using two y-axes. The CSV files must # be displayed on the same graph using two y-axes. The CSV files must
@ -24,14 +24,14 @@
# script is run and will automatically populate itself with all processes # script is run and will automatically populate itself with all processes
# listed in the csv/ directory. Change the N to a Y in the right column # listed in the csv/ directory. Change the N to a Y in the right column
# of the config file to have that process graphed when running this script # of the config file to have that process graphed when running this script
# via config settings. Groupings of processes can also be made under the # via config settings. Groupings of processes can also be made under the
# groups section by following the same N/Y format as above. When a group # groups section by following the same N/Y format as above. When a group
# name is specified all processes listed under that group name will be # name is specified all processes listed under that group name will be
# graphed if they have a Y in their right-column. # graphed if they have a Y in their right-column.
# #
# To run this script ensure that plotly is installed. # To run this script ensure that plotly is installed.
# To do this enter: sudo pip install plotly # To do this enter: sudo pip install plotly
# #
# #
# If no arguments are entered when running this script it will default to running # If no arguments are entered when running this script it will default to running
# the proceses in logplot.cfg with a Y in their rightmost column, and will display # the proceses in logplot.cfg with a Y in their rightmost column, and will display
@ -70,146 +70,146 @@ graphName=""
plotType="" plotType=""
def helpMessage():
    """Print the usage/help text for the plotter script to stdout.

    NOTE(review): the scraped source collapsed alignment whitespace inside
    the help strings; column alignment here is best-effort.
    """
    print("\n" + "-" * 120)
    print("NFV-VIM Histogram Graphing Script\n")
    print("This script is meant to graph average execution times and the delta of hits between sample periods for processes in nfv-vim logs.\n")
    print("Usage:\n")
    print(" -c ... runs from the logplot.cfg (c)onfig file. All processes in the first list with a Y\n"
          " in the far-right column will be included in the generated graph.\n")
    print(" -d ... command used to specify a (d)ate range within which you would like to see log data.\n"
          " The format is YYYY/MM/DD-YYYY/MM/DD with the lower bound on the left, and the upper\n"
          " bound on the right. The range is up to and including the bounds. To have a bound simply\n"
          " cover all datestamps before or after a bound, omit the undefined bound. Only one bound can be\n"
          " unspecified in this way.\n"
          " e.g. -d 2016/12/01-2016/12/12\n"
          " -d -2016/12/12 To use all logs prior to and including 2016/12/12\n"
          " -d 2016/12/01- To use all logs after and including 2016/12/01\n")
    print(" -t ... used to indicate that you would like the graph to display average execution (t)imes\n"
          " along the y-axis.\n")
    # Typo fix: "dela" -> "delta" in the user-facing help text.
    print(" -h ... used to indicate that you would like the graph to display the delta of (h)its between\n"
          " sample periods on the y-axis.\n")
    print(" -l ... used to set the graph to be a line graph. (Can be used with -m as well)\n")
    print(" -m ... used to set the graph to be a scatterplot. (Can be used with -l as well)\n")
    print(" -lm ... used to set the graph to be a scatterplot with connecting lines. This same effect can also be\n"
          " achieved by using -l -m\n")
    print(" -n ... used to (n)ame the file that will be generated by this script. Files can be found in the\n"
          " Graphs/ directory, found inside the directory containing this script. Do not include spaces in\n"
          " the file name. If no name is specified, the name will default to the timestamp from when the\n"
          " script was run.\n"
          " e.g. 01-24-2017.html\n")
    print(" -oneaxis ... used to generate a graph with two Y-axes sharing an x-axis. Average execution time's y-axis is\n"
          " on the right, and delta Hits per sample's y-axis is on the left. Used to look for correlations. The \n"
          " name of the process being graphed will have _time or _hits appended to it so you can tell which\n"
          " y-axis to relate it to. Only works if both -h and -t flags are used. Can be used for multiple processes.\n"
          " e.g. -h -t -oneaxis --p process1 process2\n")
    print(" --g ... will run the script for processes specified in logplot.cfg under the (G)roups heading.\n"
          " All processes listed under the named group's heading will be included in the graph.\n"
          " Space-delimit the groups to be included. This must be the last command entered.\n"
          " e.g. --g group1 group2\n")
    print(" --p ... follow this with a space-delimited list of (p)rocesses you would like to graph together.\n"
          " This must be the last command entered.\n")
    print(" --update ... This will update the master list of process at the beginning of the logplot.cfg file:\n"
          " Processes not currently listed in the master list will be added and their run status set to N.\n\n")
    print("Note: If neither the -t nor -h tag is used, the script will default to display the average execution time on the y-axis.\n\n")
    print("Examples:\n")
    print("./plotter.py -c -d 2016/12/3-2016/12/10 -t -n ConfigOutput_Dec_3-10 ... This will graph all processes with a Y in their\n"
          " right-most column in the config file, using logs\n"
          " with a timestamp between Dec 3rd and 10th 2016,\n"
          " and will display their average execution time in\n"
          " the y-axis. The file will be called\n"
          " ConfigOutput_Dec_3-10.html")
    print("./plotter.py -h -t --g group1 ... This will generate two graphs, one with the delta of hits\n"
          " on the y-axis, and the other with the average execution time\n"
          " in the y-axis, for processes listed under group1 in\n"
          " logplot.cfg.\n"
          " period will be displayed on the y-axis.\n")
    print("./plotter.py ... This will run the default settings, which are to run\n"
          " for the processes enabled in the master list in\n"
          " the config file, to use log information for all dates\n"
          " available, to show average execution time on the y-axis,\n"
          " and to name the file with the current day's datestamp.")
    print("-" * 120)
# Appends new processes found via CSV filenames to the master process list in logplot.cfg if there are not already present. # Appends new processes found via CSV filenames to the master process list in logplot.cfg if there are not already present.
# If logplot.cfg has not been generated yet, this will create it and add process names found in filenames in ./csv # If logplot.cfg has not been generated yet, this will create it and add process names found in filenames in ./csv
# Appends new processes found via CSV filenames to the master process list in logplot.cfg if they are not already present.
# If logplot.cfg has not been generated yet, this will create it and add process names found in filenames in ./csv
def updater(configExists=True):
    """Sync the master process list in logplot.cfg with the CSVs in ./csv.

    configExists -- when False, create a brand-new logplot.cfg (all
    processes disabled, plus an example GROUPS section); when True, insert
    any newly discovered process names into the existing master list.
    Relies on the module-level globals `pth` (CSV directory) and `dir`
    (script directory).
    """
    procs = []
    existingProcs = []
    newProcs = []
    position = 0  # Index of the end of the master list; new entries go above it.
    # Collect process names from CSV file names (strip the ".csv" suffix).
    os.chdir(pth)
    for name in iglob("*.csv"):
        procs.append(str(name)[:-4])
    os.chdir("..")
    if not configExists:
        # First run: generate a fresh config with every process disabled (N)
        # and an example GROUPS section. 'with' replaces bare open()/close().
        with open(os.path.join(dir, 'logplot.cfg'), "w") as f:
            for p in procs:
                # Pad names to a 59-char column so the N/Y flag lines up.
                f.write(p + " " * (59 - len(p)) + "N\n")
            f.write("#" * 20 + "END OF PROCESS LIST" + "#" * 21 + "\n\n")
            f.write("#" * 27 + "GROUPS" + "#" * 27 + "\n")
            f.write("#GroupSTART\n")
            f.write("GroupName=ExampleGroupName1\n")
            f.write("ExampleProcessName1" + " " * 40 + "N\n")
            f.write("ExampleProcessName2" + " " * 40 + "N\n")
            f.write("#GroupEND\n")
            f.write("-" * 60 + "\n")
            f.write("GroupName=ExampleGroupName2\n")
            f.write("ExampleProcessName3" + " " * 40 + "N\n")
            f.write("ExampleProcessName4" + " " * 40 + "N\n")
            f.write("#GroupEND\n")
            f.write("#" * 20 + "END OF GROUPS" + "#" * 27)
    else:
        with open(os.path.join(dir, 'logplot.cfg'), "r+") as f:
            cfgLines = f.read().splitlines()
            # The master list runs until the "...#END OF PROCESS LIST..." marker.
            for cfgProc in cfgLines:
                if "#END" in cfgProc:
                    break
                existingProcs.append(cfgProc.split()[0])
                position += 1
            for p in procs:
                if p not in existingProcs:
                    newProcs.append(p + " " * (59 - len(p)) + "N")
            # Splice the new entries in just above the end-of-list marker,
            # then rewrite the file in place.
            procs = cfgLines[:position] + newProcs + cfgLines[position:]
            f.seek(0)
            f.write("\n".join(procs))
            f.truncate()
# Appends process names found in the specified group to the list of processes to be graphed. # Appends process names found in the specified group to the list of processes to be graphed.
# Appends process names found in the specified group to the list of processes to be graphed.
def gCommand(groups):
    """Collect the processes to graph from the named groups in logplot.cfg.

    groups -- iterable of group names ("GroupName=<name>" headings).
    Returns the list of member process names whose flag column is "Y".
    A group that cannot be found appends a message to the module-level
    `warnings` list instead of raising.
    """
    procs = []
    # Read the whole config up front; 'with' closes the handle promptly
    # (the original held it open for the entire loop).
    with open(os.path.join(dir, 'logplot.cfg'), "r") as f:
        cfgLines = f.read().splitlines()
    for g in groups:
        groupFound = False
        for i in range(len(cfgLines)):
            if ("GroupName=" + g) == cfgLines[i].strip():
                groupFound = True
                # Walk the group's members until its #GroupEND marker.
                liNum = i + 1
                while "GroupEND" not in cfgLines[liNum]:
                    cfgLine = cfgLines[liNum].split()
                    if cfgLine[1] == "Y":
                        procs.append(cfgLine[0])
                    liNum += 1
                break  # group processed; move on to the next requested group
        if not groupFound:
            warnings.append("WARNING: The following group could not be found: %s\n\t\t Please check your logplot.cfg file for the intended group name."%(g,))
    return procs
# Appends processes explicitly named by the user to the list of processes to be run. # Appends processes explicitly named by the user to the list of processes to be run.
@ -217,149 +217,149 @@ def gCommand(groups):
# a list of known processes containing the name they entered. If they enter one of the provided names, it will be added to the list. If the # a list of known processes containing the name they entered. If they enter one of the provided names, it will be added to the list. If the
# user enters "s", the process in question will be skipped and the script will continue. If they user enters "q" the script will exit. # user enters "s", the process in question will be skipped and the script will continue. If they user enters "q" the script will exit.
# Appends processes explicitly named by the user to the list of processes to be run.
# For each requested name: if <name>.csv exists under `pth` it is accepted as-is;
# otherwise the user is shown candidate filenames containing the keyword and is
# prompted (raw_input) to correct the name, skip ("s"), or quit ("q").
# NOTE(review): reconstructed from a mangled diff view; the for/else nesting
# around os.walk is as best as can be inferred - verify against the original.
def pCommand(pList):
    procList=[]
    for i in xrange(len(pList)):
        csvFile=str(pList[i])+".csv"
        procName=str(pList[i])
        isFile=False
        if os.path.isfile(os.path.join(pth,csvFile)):
            # Exact CSV exists: accept the name unchanged.
            isFile = True
            procList.append(pList[i])
        else:
            # Interactive correction loop: keep prompting until a matching
            # CSV is found, the user skips, or the user quits.
            while(not isFile):
                print "\nFiles containing keyword: %s"%(str(procName))
                csvFile=str(procName)+".csv"
                for root, directories, filenames in os.walk(pth):
                    for filename in filenames:
                        if procName.lower() in filename.lower():
                            if (str(procName)+".csv") == str(filename):
                                # Exact match found; record the corrected name.
                                isFile=True
                                procList.append(str(procName).strip())
                                break
                            else:
                                # Partial match: show it as a suggestion
                                # (strip the ".csv" suffix for display).
                                print " "+filename[:-4]
                else:
                    # for/else: reached when the walk loop was not broken out of.
                    procName = str(raw_input("\nEnter the corrected process name, q to quit, or s to skip: ")).strip()
                    if procName=="s":
                        # Skip this process and continue with the next request.
                        isFile=True
                        break
                    elif procName=="q":
                        sys.exit()
    return procList
# Stores the average execution time, or delta hit count data into into a plotly graph obj, and restricts sample to be within a certain # Stores the average execution time, or delta hit count data into into a plotly graph obj, and restricts sample to be within a certain
# date range if specified. If plots is 1, one graph will be generated. If plots is 2, two graphs will be generated with one above the other. # date range if specified. If plots is 1, one graph will be generated. If plots is 2, two graphs will be generated with one above the other.
# Stores the average execution time, or delta hit count data into into a plotly graph obj, and restricts sample to be within a certain
# date range if specified. If plots is 1, one graph will be generated. If plots is 2, two graphs will be generated with one above the other.
def storeGraphData(procs, dateRange=None, execTime=False, hits=False, plots=1):
    """Load per-process CSV data into plotly Scatter traces on `fig`.

    procs     -- list of process names (one <name>.csv per process in `pth`)
    dateRange -- optional [low, high] YYYYMMDD-comparable bounds; falsy
                 (None or []) means no filtering.  Default changed from a
                 mutable [] to None - `if dateRange:` behaves identically.
    execTime  -- plot average execution time on the y-axis (else delta hits)
    plots     -- 1: single subplot; 2: place traces on the second subplot
    Uses module globals: pth, fig, plotType, oneAxis, go (plotly).
    The original duplicated the whole loop for the filtered/unfiltered
    cases; the branches are merged here with an early `continue`.
    """
    graphData = {}
    prevHitTotal = 0
    timeList = [[] for _ in range(len(procs))]
    dateList = [[] for _ in range(len(procs))]
    hitList = [[] for _ in range(len(procs))]
    for i in range(len(procs)):
        csvFile = str(procs[i]) + ".csv"
        with open(os.path.join(pth, csvFile), 'rb') as f:
            reader = csv.reader(f, delimiter=',', quoting=csv.QUOTE_NONE)
            for ts, at, h, n in reader:
                if dateRange:
                    # "2016-12-03T..." -> "20161203" for numeric comparison.
                    date = ''.join(x for x in ts.split("T")[0].split('-'))
                    # Skip out-of-range rows BEFORE updating prevHitTotal,
                    # matching the original filtered branch exactly.
                    if not (int(dateRange[0]) <= int(date) <= int(dateRange[1])):
                        continue
                timeList[i].append(at)
                dateList[i].append(str(ts[0:10:1] + " " + ts[11:]))
                # Delta hits = this sample's running total minus the previous.
                hitList[i].append(int(h) - prevHitTotal)
                prevHitTotal = int(h)
        # First delta is meaningless (no previous sample); blank it out.
        hitList[i][0] = None
        graphData['trace' + str(i)] = go.Scatter(
            x = dateList[i],
            y = timeList[i] if execTime else hitList[i],
            mode = plotType,
            name = (procs[i] if not oneAxis else (procs[i] + "_" + ("time" if execTime else "hits")))
        )
        if plots == 1:
            fig.append_trace(graphData['trace' + str(i)], 1, 1)
        elif plots == 2:
            fig.append_trace(graphData['trace' + str(i)], 2, 1)
# Formats the graph by adding axis titles, changing font sizes, setting there to be two separate graphs or two graphs sharing an x-axis etc. # Formats the graph by adding axis titles, changing font sizes, setting there to be two separate graphs or two graphs sharing an x-axis etc.
def formatGraph(two, oneAxis): def formatGraph(two, oneAxis):
fig['layout'].update(showlegend=True) fig['layout'].update(showlegend=True)
if two: if two:
if oneAxis: if oneAxis:
fig['layout']['xaxis1'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['xaxis1'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout']['yaxis1'].update(title='Hits Per Sample',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['yaxis1'].update(title='Hits Per Sample',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout']['yaxis2'].update(title='Average Execution Time (milliseconds)',anchor='x',overlaying='y',side='right',position=1,titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['yaxis2'].update(title='Average Execution Time (milliseconds)',anchor='x',overlaying='y',side='right',position=1,titlefont=dict(size=20, color='#4d4d4d'))
else: else:
fig['layout']['xaxis1'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['xaxis1'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout']['yaxis1'].update(title='Average Execution Time (milliseconds)',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['yaxis1'].update(title='Average Execution Time (milliseconds)',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout']['xaxis2'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['xaxis2'].update(title='Timestamp',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout']['yaxis2'].update(title='Hits Per Sample',titlefont=dict(size=20, color='#4d4d4d')) fig['layout']['yaxis2'].update(title='Hits Per Sample',titlefont=dict(size=20, color='#4d4d4d'))
fig['layout'].update(title=graphName, titlefont=dict(size=26)) fig['layout'].update(title=graphName, titlefont=dict(size=26))
else: else:
fig['layout'].update( fig['layout'].update(
title=graphName, title=graphName,
xaxis=dict( xaxis=dict(
title="Timestamp", title="Timestamp",
titlefont=dict( titlefont=dict(
family='Courier New, monospace', family='Courier New, monospace',
size=18, size=18,
color='#4d4d4d' color='#4d4d4d'
) )
), ),
yaxis=dict( yaxis=dict(
title="Average Execution Time (milliseconds)" if execTime else "Hits Per Sample", title="Average Execution Time (milliseconds)" if execTime else "Hits Per Sample",
titlefont=dict( titlefont=dict(
family='Courier New, monospace', family='Courier New, monospace',
size=18, size=18,
color='#4d4d4d' color='#4d4d4d'
) )
) )
) )
# Sets the name of the saved html file. # Sets the name of the saved html file.
def setFilename(graphName): def setFilename(graphName):
validName=False validName=False
if not os.path.exists("Graphs/"): if not os.path.exists("Graphs/"):
os.makedirs("Graphs/") os.makedirs("Graphs/")
os.chdir(os.path.join(dir,'Graphs/')) os.chdir(os.path.join(dir,'Graphs/'))
if not graphName: if not graphName:
graphName=time.strftime("%m-%d-%Y") graphName=time.strftime("%m-%d-%Y")
if os.path.exists(str(graphName+".html")): if os.path.exists(str(graphName+".html")):
n=1 n=1
while(not validName): while(not validName):
if os.path.exists(str(graphName+"("+str(n)+").html")): if os.path.exists(str(graphName+"("+str(n)+").html")):
n+=1 n+=1
else: else:
graphName=graphName+"("+str(n)+")" graphName=graphName+"("+str(n)+")"
validName=True validName=True
return graphName return graphName
@ -367,129 +367,129 @@ print "Welcome to plotter, type --help for information"
# Checks that plotly is installed, otherwise graphs cannot be generated. # Checks that plotly is installed, otherwise graphs cannot be generated.
plotCheck=commands.getstatusoutput("pip list | grep plotly") plotCheck=commands.getstatusoutput("pip list | grep plotly")
if plotCheck[0]==0: if plotCheck[0]==0:
if "plotly" not in plotCheck[1]: if "plotly" not in plotCheck[1]:
print "\n\tWARNING: Plotly is not installed on your system.\n\tPlease install it with: sudo pip install plotly\n" print "\n\tWARNING: Plotly is not installed on your system.\n\tPlease install it with: sudo pip install plotly\n"
sys.exit() sys.exit()
# Checks to see if logplot.cfg already exists, creates it if not. # Checks to see if logplot.cfg already exists, creates it if not.
if not os.path.isfile(os.path.join(dir,'logplot.cfg')): if not os.path.isfile(os.path.join(dir,'logplot.cfg')):
print "Generating logplot.cfg" print "Generating logplot.cfg"
updater(False) updater(False)
print "logplot.cfg created." print "logplot.cfg created."
if not os.path.isdir('./csv'): if not os.path.isdir('./csv'):
print "\n\tWARNING: ./csv directory is missing. Please run Histogram.sh or make sure directory has not been renamed.\n" print "\n\tWARNING: ./csv directory is missing. Please run Histogram.sh or make sure directory has not been renamed.\n"
sys.exit() sys.exit()
command = sys.argv # Takes arguments from the command line command = sys.argv # Takes arguments from the command line
if len(command)==1: if len(command)==1:
print "Running with default settings." print "Running with default settings."
default = True default = True
else: else:
for i in xrange(1,len(command)): for i in xrange(1,len(command)):
if command[i] == "-c": # Use config file if command[i] == "-c": # Use config file
config=True config=True
elif command[i] == "--g": # Groups elif command[i] == "--g": # Groups
for j in xrange(i+1,len(command)): for j in xrange(i+1,len(command)):
group.append(command[j]) group.append(command[j])
procs=gCommand(group) procs=gCommand(group)
break break
elif command[i] == "-t": # Average execution time elif command[i] == "-t": # Average execution time
execTime=True execTime=True
elif command[i] == "-h": # Delta hits between samples elif command[i] == "-h": # Delta hits between samples
hits=True hits=True
elif command[i] == "-l": # Graph with lines elif command[i] == "-l": # Graph with lines
lines=True lines=True
elif command[i] == "-m": # Graph with markers (scatter) elif command[i] == "-m": # Graph with markers (scatter)
markers=True markers=True
elif command[i] == "-lm": # Graph with lines and markers elif command[i] == "-lm": # Graph with lines and markers
lines=True lines=True
markers=True markers=True
elif command[i] == "-d": # Date range elif command[i] == "-d": # Date range
dateRange=command[i+1].split('-') dateRange=command[i+1].split('-')
if dateRange[0]: if dateRange[0]:
lower=dateRange[0].split("/") lower=dateRange[0].split("/")
dateRange[0]=lower[0]+lower[1].zfill(2)+lower[2].zfill(2) dateRange[0]=lower[0]+lower[1].zfill(2)+lower[2].zfill(2)
else: else:
dateRange[0]="0"*8 dateRange[0]="0"*8
if dateRange[1]: if dateRange[1]:
upper=dateRange[1].split("/") upper=dateRange[1].split("/")
dateRange[1]=upper[0]+upper[1].zfill(2)+upper[2].zfill(2) dateRange[1]=upper[0]+upper[1].zfill(2)+upper[2].zfill(2)
else: else:
dateRange[1]="9"*8 dateRange[1]="9"*8
i+=1 i+=1
elif command[i] == "-n": # Name of file to be generated elif command[i] == "-n": # Name of file to be generated
graphName=command[i+1] graphName=command[i+1]
i+=1 i+=1
elif command[i] == "-oneaxis": # Have hit and time data displayed on same graph elif command[i] == "-oneaxis": # Have hit and time data displayed on same graph
oneAxis=True oneAxis=True
elif (command[i] == "--help") or (command[i] == "--h"): # Print help message and exit script elif (command[i] == "--help") or (command[i] == "--h"): # Print help message and exit script
helpMessage() helpMessage()
sys.exit() sys.exit()
elif command[i] == "--p": # User-specified processes elif command[i] == "--p": # User-specified processes
for j in xrange(i+1,len(command)): for j in xrange(i+1,len(command)):
procs.append(command[j]) procs.append(command[j])
procs=pCommand(procs) procs=pCommand(procs)
break break
elif command[i] == "--update": elif command[i] == "--update":
print "Updating..." print "Updating..."
updater() updater()
print "Update complete." print "Update complete."
sys.exit() sys.exit()
# If neither average execution time nor delta hit count are specified to be shown, default to showing average execution time. # If neither average execution time nor delta hit count are specified to be shown, default to showing average execution time.
if (not execTime) and (not hits): if (not execTime) and (not hits):
execTime = True execTime = True
# Default settings can be changed as desired. # Default settings can be changed as desired.
if default: if default:
config=True config=True
execTime=True execTime=True
if (lines and markers): if (lines and markers):
plotType="lines+markers" plotType="lines+markers"
elif lines: elif lines:
plotType="lines" plotType="lines"
else: else:
plotType="markers" plotType="markers"
if config: if config:
f=open(os.path.join(dir,'logplot.cfg'),"r") f=open(os.path.join(dir,'logplot.cfg'),"r")
procList=f.read().splitlines() procList=f.read().splitlines()
for p in procList: for p in procList:
if "#END" in p: if "#END" in p:
break break
cfgLine=p.split() cfgLine=p.split()
if cfgLine[1]=="Y": if cfgLine[1]=="Y":
csvFile=cfgLine[0]+".csv" csvFile=cfgLine[0]+".csv"
if os.path.exists(os.path.join(pth,csvFile)): if os.path.exists(os.path.join(pth,csvFile)):
procs.append(cfgLine[0]) procs.append(cfgLine[0])
else: else:
warnings.append("WARNING: %s does not exist."%(csvFile,)) warnings.append("WARNING: %s does not exist."%(csvFile,))
f.close() f.close()
# If both average execution time and delta hits are specified to be shown, generate two graphs if -oneaxis wasn't specified. # If both average execution time and delta hits are specified to be shown, generate two graphs if -oneaxis wasn't specified.
# If only one of execution time and delta hits was specified, generate one graph. # If only one of execution time and delta hits was specified, generate one graph.
if procs: if procs:
if (execTime and hits): if (execTime and hits):
if(not oneAxis): if(not oneAxis):
fig = tools.make_subplots(rows=2, cols=1) fig = tools.make_subplots(rows=2, cols=1)
storeGraphData(procs, dateRange, execTime, False, 1) storeGraphData(procs, dateRange, execTime, False, 1)
storeGraphData(procs, dateRange, False, hits, 2) storeGraphData(procs, dateRange, False, hits, 2)
else: else:
fig = tools.make_subplots(rows=1, cols=1) fig = tools.make_subplots(rows=1, cols=1)
storeGraphData(procs, dateRange, False, hits, 1) storeGraphData(procs, dateRange, False, hits, 1)
storeGraphData(procs, dateRange, execTime, False, 1) storeGraphData(procs, dateRange, execTime, False, 1)
else: else:
fig = tools.make_subplots(rows=1, cols=1) fig = tools.make_subplots(rows=1, cols=1)
storeGraphData(procs, dateRange, execTime, hits) storeGraphData(procs, dateRange, execTime, hits)
formatGraph((execTime and hits), oneAxis) formatGraph((execTime and hits), oneAxis)
# Generates the plot # Generates the plot
plotly.offline.plot(fig, filename=setFilename(graphName)+".html") plotly.offline.plot(fig, filename=setFilename(graphName)+".html")
else: else:
warnings.append("NO GRAPH GENERATED BECAUSE NO VALID GROUP OR PROCESS NAME SPECIFIED.") warnings.append("NO GRAPH GENERATED BECAUSE NO VALID GROUP OR PROCESS NAME SPECIFIED.")
# If any warnings occured, print them # If any warnings occured, print them
if warnings: if warnings:
print "\n\t"+("\n\t").join(warnings)+"\n" print "\n\t"+("\n\t").join(warnings)+"\n"

View File

@ -39,8 +39,9 @@ def build_html_doc(build_dir, document_data):
six.print_(index_html, file=f) six.print_(index_html, file=f)
for toc_entry in document_data['table_of_contents']: for toc_entry in document_data['table_of_contents']:
toc_entry_data = yaml.load(open(DOC_SRC_DIR + '/' + toc_entry['link'] toc_entry_data = yaml.load(open(DOC_SRC_DIR + '/' +
+ '.yaml')) toc_entry['link'] +
'.yaml'))
toc_entry_data['page_link'] = toc_entry['link'] toc_entry_data['page_link'] = toc_entry['link']
page_content_template = j2_env.get_template('page_content.html') page_content_template = j2_env.get_template('page_content.html')
@ -65,4 +66,4 @@ if __name__ == '__main__':
if 'html' == args.builder: if 'html' == args.builder:
build_html_doc(args.build_dir, document_data) build_html_doc(args.build_dir, document_data)
else: else:
print "No builder selected, do nothing." print "No builder selected, do nothing."

View File

@ -1397,7 +1397,7 @@ class TestSwPatchStrategy:
validate_strategy_persists(strategy) validate_strategy_persists(strategy)
validate_phase(apply_phase, expected_results) validate_phase(apply_phase, expected_results)
# Test no reboot patches. # Test no reboot patches.
strategy = create_sw_patch_strategy( strategy = create_sw_patch_strategy(
compute_apply_type=SW_UPDATE_APPLY_TYPE.PARALLEL, compute_apply_type=SW_UPDATE_APPLY_TYPE.PARALLEL,
default_instance_action=SW_UPDATE_INSTANCE_ACTION.STOP_START, default_instance_action=SW_UPDATE_INSTANCE_ACTION.STOP_START,

View File

@ -1481,8 +1481,8 @@ class QueryAlarmsStep(strategy.StrategyStep):
nfvi_alarms = list() nfvi_alarms = list()
for nfvi_alarm in response['result-data']: for nfvi_alarm in response['result-data']:
if (self.strategy._alarm_restrictions == if (self.strategy._alarm_restrictions ==
strategy.STRATEGY_ALARM_RESTRICTION_TYPES.RELAXED strategy.STRATEGY_ALARM_RESTRICTION_TYPES.RELAXED and
and nfvi_alarm.mgmt_affecting == 'False'): nfvi_alarm.mgmt_affecting == 'False'):
DLOG.warn("Ignoring non-management affecting alarm " DLOG.warn("Ignoring non-management affecting alarm "
"%s - uuid %s due to relaxed alarm " "%s - uuid %s due to relaxed alarm "
"strictness" % (nfvi_alarm.alarm_id, "strictness" % (nfvi_alarm.alarm_id,
@ -1566,8 +1566,8 @@ class WaitDataSyncStep(strategy.StrategyStep):
nfvi_alarms = list() nfvi_alarms = list()
for nfvi_alarm in response['result-data']: for nfvi_alarm in response['result-data']:
if (self.strategy._alarm_restrictions == if (self.strategy._alarm_restrictions ==
strategy.STRATEGY_ALARM_RESTRICTION_TYPES.RELAXED strategy.STRATEGY_ALARM_RESTRICTION_TYPES.RELAXED and
and nfvi_alarm.mgmt_affecting == 'False'): nfvi_alarm.mgmt_affecting == 'False'):
DLOG.warn("Ignoring non-management affecting alarm " DLOG.warn("Ignoring non-management affecting alarm "
"%s - uuid %s due to relaxed alarm " "%s - uuid %s due to relaxed alarm "
"strictness" % (nfvi_alarm.alarm_id, "strictness" % (nfvi_alarm.alarm_id,

View File

@ -2,5 +2,3 @@
# #
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #

View File

@ -14,7 +14,7 @@ from oslo_config import cfg
from nova_api_proxy.common import config from nova_api_proxy.common import config
from nova_api_proxy.common.service import Server from nova_api_proxy.common.service import Server
from nova_api_proxy.common import log as logging from nova_api_proxy.common import log as logging
from nova_api_proxy.common import histogram from nova_api_proxy.common import histogram
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)

View File

@ -2,4 +2,3 @@
# #
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #

View File

@ -73,8 +73,8 @@ class APIController(Middleware):
return False return False
def _log_message(self, environ): def _log_message(self, environ):
remote_addr = environ.get('HTTP_X_FORWARDED_FOR', remote_addr = environ.get('HTTP_X_FORWARDED_FOR',
environ['REMOTE_ADDR']) environ['REMOTE_ADDR'])
LOG.info("%s request issued by user (%s) tenant (%s) remote address " LOG.info("%s request issued by user (%s) tenant (%s) remote address "
"(%s)" "(%s)"
" \"%s %s\"" % (environ['REQUEST_METHOD'], " \"%s %s\"" % (environ['REQUEST_METHOD'],

View File

@ -86,4 +86,3 @@ class APIDispatcher(object):
utils.set_request_forward_environ(req, self._remote_host, utils.set_request_forward_environ(req, self._remote_host,
self._remote_port) self._remote_port)
return self.app return self.app

View File

@ -87,8 +87,8 @@ class DebugProxy(Application):
else: else:
body = '' body = ''
path = (environ.get('SCRIPT_NAME', '') path = (environ.get('SCRIPT_NAME', '') +
+ environ.get('PATH_INFO', '')) environ.get('PATH_INFO', ''))
path = urllib.quote(path) path = urllib.quote(path)
if 'QUERY_STRING' in environ: if 'QUERY_STRING' in environ:
path += '?' + environ['QUERY_STRING'] path += '?' + environ['QUERY_STRING']

View File

@ -2,5 +2,3 @@
# #
# SPDX-License-Identifier: Apache-2.0 # SPDX-License-Identifier: Apache-2.0
# #

View File

@ -19,7 +19,7 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
# #
class ProxyException(Exception): class ProxyException(Exception):
"""Base Exception """Base Exception

View File

@ -156,4 +156,3 @@ def display_histogram_data(name=None):
histogram = _find_histogram(name) histogram = _find_histogram(name)
if histogram is not None: if histogram is not None:
histogram.display_data() histogram.display_data()

View File

@ -270,4 +270,3 @@ class Middleware(Application):
return response return response
response = req.get_response(self.application) response = req.get_response(self.application)
return self.process_response(response) return self.process_response(response)

View File

@ -33,4 +33,3 @@ def get_monotonic_timestamp_in_ms():
raise OSError(errno_, os.strerror(errno_)) raise OSError(errno_, os.strerror(errno_))
timestamp_ms = (t.tv_sec * 1e+3) + (t.tv_nsec * 1e-6) timestamp_ms = (t.tv_sec * 1e+3) + (t.tv_nsec * 1e-6)
return timestamp_ms return timestamp_ms

29
tox.ini
View File

@ -28,6 +28,35 @@ commands =
-o -type f -name '*.yaml' \ -o -type f -name '*.yaml' \
-print0 | xargs -0 yamllint" -print0 | xargs -0 yamllint"
[pep8]
# Temporarily ignoring these warnings
# E101 indentation contains mixed spaces and tabs
# E116 unexpected indentation (comment)
# E121 continuation line under-indented for hanging indent
# E122 continuation line missing indentation or outdented
# E123 closing bracket does not match indentation of opening bracket
# E124 closing bracket does not match visual indentation
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# E128 continuation line under-indented for visual indent
# E129 visually indented line with same indent as next logical line
# E203 whitespace before ':'
# E211 whitespace before '('
# E225 missing whitespace around operator
# E226 missing whitespace around arithmetic operator
# E228 missing whitespace around modulo operator
# E231 missing whitespace after ':'
# E241 multiple spaces after
# E261 at least two spaces before inline comment
# E265 block comment should start with '# '
# E251 unexpected spaces around keyword / parameter equals
# E302 expected 2 blank lines, found 1
# E303 too many blank lines
# E501 line too long
# E712 comparison to bool should be reworded
ignore = E101,E116,E121,E123,E122,E124,E126,E127,E128,E129,E203,E211,E225,E226,E228,E231,E241,E251,E261,E265,E302,E303,E501,E712
[testenv:pep8] [testenv:pep8]
usedevelop = False usedevelop = False
skip_install = True skip_install = True