Development of an internal social media platform with personalised dashboards for students
You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.

raw_metrics.py 4.0KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120
  1. # Copyright (c) 2007, 2010, 2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
  2. # Copyright (c) 2013 Google, Inc.
  3. # Copyright (c) 2014 Arun Persaud <arun@nubati.net>
  4. # Copyright (c) 2015-2017 Claudiu Popa <pcmanticore@gmail.com>
  5. # Copyright (c) 2015 Mike Frysinger <vapier@gentoo.org>
  6. # Copyright (c) 2015 Ionel Cristian Maries <contact@ionelmc.ro>
  7. # Copyright (c) 2016 Glenn Matthews <glenn@e-dad.net>
  8. # Licensed under the GPL: https://www.gnu.org/licenses/old-licenses/gpl-2.0.html
  9. # For details: https://github.com/PyCQA/pylint/blob/master/COPYING
  10. """ Copyright (c) 2003-2010 LOGILAB S.A. (Paris, FRANCE).
  11. http://www.logilab.fr/ -- mailto:contact@logilab.fr
  12. Raw metrics checker
  13. """
  14. import tokenize
  15. from pylint.interfaces import ITokenChecker
  16. from pylint.exceptions import EmptyReportError
  17. from pylint.checkers import BaseTokenChecker
  18. from pylint.reporters import diff_string
  19. from pylint.reporters.ureports.nodes import Table
  20. def report_raw_stats(sect, stats, old_stats):
  21. """calculate percentage of code / doc / comment / empty
  22. """
  23. total_lines = stats['total_lines']
  24. if not total_lines:
  25. raise EmptyReportError()
  26. sect.description = '%s lines have been analyzed' % total_lines
  27. lines = ('type', 'number', '%', 'previous', 'difference')
  28. for node_type in ('code', 'docstring', 'comment', 'empty'):
  29. key = node_type + '_lines'
  30. total = stats[key]
  31. percent = float(total * 100) / total_lines
  32. old = old_stats.get(key, None)
  33. if old is not None:
  34. diff_str = diff_string(old, total)
  35. else:
  36. old, diff_str = 'NC', 'NC'
  37. lines += (node_type, str(total), '%.2f' % percent,
  38. str(old), diff_str)
  39. sect.append(Table(children=lines, cols=5, rheaders=1))
  40. class RawMetricsChecker(BaseTokenChecker):
  41. """does not check anything but gives some raw metrics :
  42. * total number of lines
  43. * total number of code lines
  44. * total number of docstring lines
  45. * total number of comments lines
  46. * total number of empty lines
  47. """
  48. __implements__ = (ITokenChecker,)
  49. # configuration section name
  50. name = 'metrics'
  51. # configuration options
  52. options = ()
  53. # messages
  54. msgs = {}
  55. # reports
  56. reports = (('RP0701', 'Raw metrics', report_raw_stats),)
  57. def __init__(self, linter):
  58. BaseTokenChecker.__init__(self, linter)
  59. self.stats = None
  60. def open(self):
  61. """init statistics"""
  62. self.stats = self.linter.add_stats(total_lines=0, code_lines=0,
  63. empty_lines=0, docstring_lines=0,
  64. comment_lines=0)
  65. def process_tokens(self, tokens):
  66. """update stats"""
  67. i = 0
  68. tokens = list(tokens)
  69. while i < len(tokens):
  70. i, lines_number, line_type = get_type(tokens, i)
  71. self.stats['total_lines'] += lines_number
  72. self.stats[line_type] += lines_number
  73. JUNK = (tokenize.NL, tokenize.INDENT, tokenize.NEWLINE, tokenize.ENDMARKER)
  74. def get_type(tokens, start_index):
  75. """return the line type : docstring, comment, code, empty"""
  76. i = start_index
  77. tok_type = tokens[i][0]
  78. start = tokens[i][2]
  79. pos = start
  80. line_type = None
  81. while i < len(tokens) and tokens[i][2][0] == start[0]:
  82. tok_type = tokens[i][0]
  83. pos = tokens[i][3]
  84. if line_type is None:
  85. if tok_type == tokenize.STRING:
  86. line_type = 'docstring_lines'
  87. elif tok_type == tokenize.COMMENT:
  88. line_type = 'comment_lines'
  89. elif tok_type in JUNK:
  90. pass
  91. else:
  92. line_type = 'code_lines'
  93. i += 1
  94. if line_type is None:
  95. line_type = 'empty_lines'
  96. elif i < len(tokens) and tokens[i][0] == tokenize.NEWLINE:
  97. i += 1
  98. return i, pos[0] - start[0] + 1, line_type
  99. def register(linter):
  100. """ required method to auto register this checker """
  101. linter.register_checker(RawMetricsChecker(linter))