
statement_splitter.py 3.6KB

# -*- coding: utf-8 -*-
#
# Copyright (C) 2009-2018 the sqlparse authors and contributors
# <see AUTHORS file>
#
# This module is part of python-sqlparse and is released under
# the BSD License: https://opensource.org/licenses/BSD-3-Clause

from sqlparse import sql, tokens as T


class StatementSplitter(object):
    """Filter that splits the stream at individual statements."""

    def __init__(self):
        self._reset()

    def _reset(self):
        """Set the filter attributes to their default values."""
        self._in_declare = False
        self._is_create = False
        self._begin_depth = 0

        self.consume_ws = False
        self.tokens = []
        self.level = 0

    def _change_splitlevel(self, ttype, value):
        """Get the new split level (increase, decrease or remain equal)."""
        # ANSI
        # if normal token return
        # wouldn't parenthesis increase/decrease a level?
        # no, a new statement can't start inside a parenthesis
        if ttype not in T.Keyword:
            return 0

        # Everything after here is ttype = T.Keyword
        # Also note: once one of the branches below is entered, it returns.
        unified = value.upper()

        # three keywords begin with CREATE, but only one of them is DDL
        # DDL CREATE can be followed by more words, e.g. "OR REPLACE"
        if ttype is T.Keyword.DDL and unified.startswith('CREATE'):
            self._is_create = True
            return 0

        # there can be a nested DECLARE inside of BEGIN...
        if unified == 'DECLARE' and self._is_create and self._begin_depth == 0:
            self._in_declare = True
            return 1

        if unified == 'BEGIN':
            self._begin_depth += 1
            if self._is_create:
                # FIXME(andi): This makes no sense.
                return 1
            return 0

        # Should this respect a preceding BEGIN?
        # In CASE ... WHEN ... END this results in a split level -1.
        # Would having multiple CASE WHEN END and an assignment operator
        # cause the statement to be cut off prematurely?
        if unified == 'END':
            self._begin_depth = max(0, self._begin_depth - 1)
            return -1

        if (unified in ('IF', 'FOR', 'WHILE')
                and self._is_create and self._begin_depth > 0):
            return 1

        if unified in ('END IF', 'END FOR', 'END WHILE'):
            return -1

        # Default
        return 0
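
    # Illustrative note (not part of the original module): for a statement
    # such as  CREATE FUNCTION f() RETURNS INT BEGIN RETURN 1; END;  the
    # keywords above change the level as follows: CREATE -> 0 (but sets
    # _is_create), BEGIN -> +1, END -> -1.  The semicolon inside the body is
    # therefore seen at level 1 and does not end the statement, while the
    # semicolon after END is seen at level 0 and does.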

    def process(self, stream):
        """Process the stream."""
        EOS_TTYPE = T.Whitespace, T.Comment.Single

        # Run over all stream tokens
        for ttype, value in stream:
            # Yield the collected statement once it is finished and the
            # current token is no longer trailing whitespace.  A newline
            # token counts as non-whitespace here; in this context
            # "whitespace" excludes newlines.
            # why don't multi-line comments also count?
            if self.consume_ws and ttype not in EOS_TTYPE:
                yield sql.Statement(self.tokens)

                # Reset filter and prepare to process the next statement
                self._reset()

            # Change current split level (increase, decrease or remain equal)
            self.level += self._change_splitlevel(ttype, value)

            # Append the token to the current statement
            self.tokens.append(sql.Token(ttype, value))

            # Check if we reached the end of a statement
            if self.level <= 0 and ttype is T.Punctuation and value == ';':
                self.consume_ws = True

        # Yield the pending statement (if any)
        if self.tokens:
            yield sql.Statement(self.tokens)
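
The splitter is not usually instantiated directly; sqlparse's engine.FilterStack feeds it the raw token stream produced by the lexer and collects the resulting Statement objects. Below is a minimal sketch of driving it by hand, assuming python-sqlparse is installed and that the class is importable, as it is upstream, from sqlparse.engine.statement_splitter:

    from sqlparse import lexer
    from sqlparse.engine.statement_splitter import StatementSplitter

    raw = "SELECT 1; SELECT 2"
    splitter = StatementSplitter()
    # process() yields one sql.Statement per detected statement
    for statement in splitter.process(lexer.tokenize(raw)):
        print(repr(str(statement)))

In everyday use the same result is available as plain strings via sqlparse.split(raw).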