
_base.py

  1. ##############################################################################
  2. #
  3. # Copyright 2011 Zope Foundation and Contributors.
  4. # All Rights Reserved.
  5. #
  6. # This software is subject to the provisions of the Zope Public License,
  7. # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
  8. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
  9. # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
  10. # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
  11. # FOR A PARTICULAR PURPOSE.
  12. #
  13. ##############################################################################
  14. """Python BTree implementation
  15. """
  16. from struct import Struct
  17. from struct import error as struct_error
  18. from operator import index
  19. from persistent import Persistent
  20. from .Interfaces import BTreesConflictError
  21. from ._compat import PY3
  22. from ._compat import compare
  23. from ._compat import int_types
  24. from ._compat import xrange
  25. _marker = object()
  26. class _Base(Persistent):
  27. __slots__ = ()
  28. _key_type = list
  29. def __init__(self, items=None):
  30. self.clear()
  31. if items:
  32. self.update(items)
  33. try:
  34. # Detect the presence of the C extensions.
  35. # If they're NOT around, we don't need to do any of the
  36. # special pickle support to make Python versions look like
  37. # C---we just rename the classes. By not defining these methods,
  38. # we can (theoretically) avoid a bit of a slowdown.
  39. # If the C extensions are around, we do need these methods, but
  40. # these classes are unlikely to be used in production anyway.
  41. __import__('BTrees._OOBTree')
  42. except ImportError: # pragma: no cover
  43. pass
  44. else:
  45. def __reduce__(self):
  46. # Swap out the type constructor for the C version, if present.
  47. func, typ_gna, state = Persistent.__reduce__(self)
  48. # We ignore the returned type altogether in favor of
  49. # our calculated class (which allows subclasses but replaces our exact
  50. # type with the C equivalent)
  51. typ = self.__class__
  52. gna = typ_gna[1:]
  53. return (func, (typ,) + gna, state)
  54. @property
  55. def __class__(self):
  56. type_self = type(self)
  57. return type_self._BTree_reduce_as if type_self._BTree_reduce_up_bound is type_self else type_self
  58. @property
  59. def _BTree_reduce_as(self):
  60. # Return the pickle replacement class for this object.
  61. # If the C extensions are available, this will be the
  62. # C type (setup by _fix_pickle), otherwise it will be the real
  63. # type of this object.
  64. # This implementation is replaced by _fix_pickle and exists for
  65. # testing purposes.
  66. return type(self) # pragma: no cover
  67. _BTree_reduce_up_bound = _BTree_reduce_as
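# Illustrative sketch of the pickle swap above, assuming a concrete flavor
# such as BTrees.OOBTree that exposes both the C class (OOBTree) and this
# pure-Python class (OOBTreePy):
#
#     import pickle
#     from BTrees.OOBTree import OOBTreePy
#     t = OOBTreePy()
#     t['a'] = 1
#     restored = pickle.loads(pickle.dumps(t))
#     # When the C extension is importable, type(restored) should be the
#     # C OOBTree class rather than OOBTreePy.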
  68. class _BucketBase(_Base):
  69. __slots__ = ('_keys',
  70. '_next',
  71. '_to_key',
  72. )
  73. def clear(self):
  74. self._keys = self._key_type()
  75. self._next = None
  76. def __len__(self):
  77. return len(self._keys)
  78. @property
  79. def size(self):
  80. return len(self._keys)
  81. def _deleteNextBucket(self):
  82. next = self._next
  83. if next is not None:
  84. self._next = next._next
  85. def _search(self, key):
  86. # Return non-negative index on success
  87. # return -(insertion_index + 1) on fail
  88. low = 0
  89. keys = self._keys
  90. high = len(keys)
  91. while low < high:
  92. i = (low + high) // 2
  93. k = keys[i]
  94. if k is key or k == key:
  95. return i
  96. if compare(k, key) < 0:
  97. low = i + 1
  98. else:
  99. high = i
  100. return -1 - low
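# Worked example of the encoding above: with self._keys == ['b', 'd'],
# _search('b') returns 0 (found), while _search('c') returns -(1 + 1) == -2,
# meaning 'c' is absent and would belong at insertion index -(-2) - 1 == 1.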
  101. def minKey(self, key=_marker):
  102. if key is _marker or key is None:
  103. return self._keys[0]
  104. key = self._to_key(key)
  105. index = self._search(key)
  106. if index >= 0:
  107. return key
  108. index = -index - 1
  109. if index < len(self._keys):
  110. return self._keys[index]
  111. else:
  112. raise ValueError("no key satisfies the conditions")
  113. def maxKey(self, key=_marker):
  114. if key is _marker or key is None:
  115. return self._keys[-1]
  116. key = self._to_key(key)
  117. index = self._search(key)
  118. if index >= 0:
  119. return key
  120. else:
  121. index = -index-1
  122. if index:
  123. return self._keys[index-1]
  124. else:
  125. raise ValueError("no key satisfies the conditions")
  126. def _range(self, min=_marker, max=_marker,
  127. excludemin=False, excludemax=False):
  128. if min is _marker or min is None:
  129. start = 0
  130. if excludemin:
  131. start = 1
  132. else:
  133. min = self._to_key(min)
  134. start = self._search(min)
  135. if start >= 0:
  136. if excludemin:
  137. start += 1
  138. else:
  139. start = -start - 1
  140. if max is _marker or max is None:
  141. end = len(self._keys)
  142. if excludemax:
  143. end -= 1
  144. else:
  145. max = self._to_key(max)
  146. end = self._search(max)
  147. if end >= 0:
  148. if not excludemax:
  149. end += 1
  150. else:
  151. end = -end - 1
  152. return start, end
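# Example of the slice bounds computed above: with self._keys == [1, 2, 3, 4],
# _range(min=2, max=4) returns (1, 4) so keys() yields [2, 3, 4], while
# _range(min=2, max=4, excludemin=True, excludemax=True) returns (2, 3),
# yielding only [3].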
  153. def keys(self, *args, **kw):
  154. start, end = self._range(*args, **kw)
  155. return self._keys[start:end]
  156. def iterkeys(self, *args, **kw):
  157. if not (args or kw):
  158. return iter(self._keys)
  159. keys = self._keys
  160. return (keys[i] for i in xrange(*self._range(*args, **kw)))
  161. def __iter__(self):
  162. return iter(self._keys)
  163. def __contains__(self, key):
  164. return (self._search(self._to_key(key)) >= 0)
  165. has_key = __contains__
  166. def _repr_helper(self, items):
  167. type_self = type(self)
  168. mod = type_self.__module__
  169. name = type_self.__name__
  170. name = name[:-2] if name.endswith("Py") else name
  171. return "%s.%s(%r)" % (mod, name, items)
  172. class _SetIteration(object):
  173. __slots__ = ('to_iterate',
  174. 'useValues',
  175. '_iter',
  176. 'active',
  177. 'position',
  178. 'key',
  179. 'value',
  180. )
  181. def __init__(self, to_iterate, useValues=False, default=None):
  182. if to_iterate is None:
  183. to_iterate = ()
  184. self.to_iterate = to_iterate
  185. if useValues:
  186. try:
  187. itmeth = to_iterate.iteritems
  188. except AttributeError:
  189. if PY3 and isinstance(to_iterate, dict): #pragma no cover Py3k
  190. itmeth = to_iterate.items().__iter__
  191. else:
  192. itmeth = to_iterate.__iter__
  193. useValues = False
  194. else:
  195. self.value = None
  196. else:
  197. itmeth = to_iterate.__iter__
  198. self.useValues = useValues
  199. self._iter = itmeth()
  200. self.active = True
  201. self.position = 0
  202. self.key = _marker
  203. self.value = default
  204. self.advance()
  205. def advance(self):
  206. try:
  207. if self.useValues:
  208. self.key, self.value = next(self._iter)
  209. else:
  210. self.key = next(self._iter)
  211. self.position += 1
  212. except StopIteration:
  213. self.active = False
  214. self.position = -1
  215. return self
  216. _object_lt = getattr(object, '__lt__', _marker)
  217. def _no_default_comparison(key):
  218. # Enforce test that key has non-default comparison.
  219. if key is None:
  220. return
  221. if type(key) is object:
  222. raise TypeError("Can't use object() as keys")
  223. lt = getattr(key, '__lt__', None)
  224. if lt is not None:
  225. # CPython 3.x follows PEP 252, defining '__objclass__'
  226. if getattr(lt, '__objclass__', None) is object:
  227. lt = None # pragma: no cover Py3k
  228. # PyPy3 doesn't follow PEP 252, but defines '__func__'
  229. elif getattr(lt, '__func__', None) is _object_lt:
  230. lt = None # pragma: no cover PyPy3
  231. if (lt is None and
  232. getattr(key, '__cmp__', None) is None):
  233. raise TypeError("Object has default comparison")
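# Effect of the guard above: a bare object() is rejected outright, and so is
# any instance whose class defines neither __lt__ nor __cmp__, since keys
# ordered only by default identity comparison would not sort consistently
# across runs; str, int, and other types with a real __lt__ are accepted.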
  234. class Bucket(_BucketBase):
  235. __slots__ = ()
  236. _value_type = list
  237. _to_value = lambda self, x: x
  238. VALUE_SAME_CHECK = False
  239. def setdefault(self, key, value):
  240. key, value = self._to_key(key), self._to_value(value)
  241. status, value = self._set(key, value, True)
  242. return value
  243. def pop(self, key, default=_marker):
  244. try:
  245. status, value = self._del(self._to_key(key))
  246. except KeyError:
  247. if default is _marker:
  248. raise
  249. return default
  250. else:
  251. return value
  252. def update(self, items):
  253. if hasattr(items, 'iteritems'):
  254. items = items.iteritems()
  255. elif hasattr(items, 'items'):
  256. items = items.items()
  257. _si = self.__setitem__
  258. try:
  259. for key, value in items:
  260. _si(key, value)
  261. except ValueError:
  262. raise TypeError('items must be a sequence of 2-tuples')
  263. def __setitem__(self, key, value):
  264. _no_default_comparison(key)
  265. self._set(self._to_key(key), self._to_value(value))
  266. def __delitem__(self, key):
  267. self._del(self._to_key(key))
  268. def clear(self):
  269. _BucketBase.clear(self)
  270. self._values = self._value_type()
  271. def get(self, key, default=None):
  272. index = self._search(self._to_key(key))
  273. if index < 0:
  274. return default
  275. return self._values[index]
  276. def __getitem__(self, key):
  277. index = self._search(self._to_key(key))
  278. if index < 0:
  279. raise KeyError(key)
  280. return self._values[index]
  281. def _set(self, key, value, ifunset=False):
  282. """Set a value
  283. Return: status, value
  284. Status is:
  285. None if no change
  286. 0 if change, but not size change
  287. 1 if change and size change
  288. """
  289. index = self._search(key)
  290. if index >= 0:
  291. if (ifunset or
  292. self.VALUE_SAME_CHECK and value == self._values[index]
  293. ):
  294. return None, self._values[index]
  295. self._p_changed = True
  296. self._values[index] = value
  297. return 0, value
  298. else:
  299. self._p_changed = True
  300. index = -index - 1
  301. self._keys.insert(index, key)
  302. self._values.insert(index, value)
  303. return 1, value
  304. def _del(self, key):
  305. index = self._search(key)
  306. if index >= 0:
  307. self._p_changed = True
  308. del self._keys[index]
  309. return 0, self._values.pop(index)
  310. raise KeyError(key)
  311. def _split(self, index=-1):
  312. if index < 0 or index >= len(self._keys):
  313. index = len(self._keys) // 2
  314. new_instance = type(self)()
  315. new_instance._keys = self._keys[index:]
  316. new_instance._values = self._values[index:]
  317. del self._keys[index:]
  318. del self._values[index:]
  319. new_instance._next = self._next
  320. self._next = new_instance
  321. return new_instance
  322. def values(self, *args, **kw):
  323. start, end = self._range(*args, **kw)
  324. return self._values[start:end]
  325. def itervalues(self, *args, **kw):
  326. values = self._values
  327. return (values[i] for i in xrange(*self._range(*args, **kw)))
  328. def items(self, *args, **kw):
  329. keys = self._keys
  330. values = self._values
  331. return [(keys[i], values[i])
  332. for i in xrange(*self._range(*args, **kw))]
  333. def iteritems(self, *args, **kw):
  334. keys = self._keys
  335. values = self._values
  336. return ((keys[i], values[i])
  337. for i in xrange(*self._range(*args, **kw)))
  338. def __getstate__(self):
  339. keys = self._keys
  340. values = self._values
  341. data = []
  342. for i in range(len(keys)):
  343. data.append(keys[i])
  344. data.append(values[i])
  345. data = tuple(data)
  346. if self._next is not None:
  347. return data, self._next
  348. return (data, )
  349. def __setstate__(self, state):
  350. if not isinstance(state[0], tuple):
  351. raise TypeError("tuple required for first state element")
  352. self.clear()
  353. if len(state) == 2:
  354. state, self._next = state
  355. else:
  356. self._next = None
  357. state = state[0]
  358. keys = self._keys
  359. values = self._values
  360. for i in range(0, len(state), 2):
  361. keys.append(state[i])
  362. values.append(state[i+1])
  363. def _p_resolveConflict(self, s_old, s_com, s_new):
  364. b_old = type(self)()
  365. if s_old is not None:
  366. b_old.__setstate__(s_old)
  367. b_com = type(self)()
  368. if s_com is not None:
  369. b_com.__setstate__(s_com)
  370. b_new = type(self)()
  371. if s_new is not None:
  372. b_new.__setstate__(s_new)
  373. if (b_com._next != b_old._next or
  374. b_new._next != b_old._next):
  375. raise BTreesConflictError(-1, -1, -1, 0)
  376. if not b_com or not b_new:
  377. raise BTreesConflictError(-1, -1, -1, 12)
  378. i_old = _SetIteration(b_old, True)
  379. i_com = _SetIteration(b_com, True)
  380. i_new = _SetIteration(b_new, True)
  381. def merge_error(reason):
  382. return BTreesConflictError(
  383. i_old.position, i_com.position, i_new.position, reason)
  384. result = type(self)()
  385. def merge_output(it):
  386. result._keys.append(it.key)
  387. result._values.append(it.value)
  388. it.advance()
  389. while i_old.active and i_com.active and i_new.active:
  390. cmpOC = compare(i_old.key, i_com.key)
  391. cmpON = compare(i_old.key, i_new.key)
  392. if cmpOC == 0:
  393. if cmpON == 0:
  394. if i_com.value == i_old.value:
  395. result[i_old.key] = i_new.value
  396. elif i_new.value == i_old.value:
  397. result[i_old.key] = i_com.value
  398. else:
  399. raise merge_error(1)
  400. i_old.advance()
  401. i_com.advance()
  402. i_new.advance()
  403. elif (cmpON > 0): # insert in new
  404. merge_output(i_new)
  405. elif i_old.value == i_com.value: # deleted new
  406. if i_new.position == 1:
  407. # Deleted the first item. This will modify the
  408. # parent node, so we don't know if merging will be
  409. # safe
  410. raise merge_error(13)
  411. i_old.advance()
  412. i_com.advance()
  413. else:
  414. raise merge_error(2)
  415. elif cmpON == 0:
  416. if cmpOC > 0: # insert committed
  417. merge_output(i_com)
  418. elif i_old.value == i_new.value: # delete committed
  419. if i_com.position == 1:
  420. # Deleted the first item. This will modify the
  421. # parent node, so we don't know if merging will be
  422. # safe
  423. raise merge_error(13)
  424. i_old.advance()
  425. i_new.advance()
  426. else:
  427. raise merge_error(3)
  428. else: # both keys changed
  429. cmpCN = compare(i_com.key, i_new.key)
  430. if cmpCN == 0: # dueling insert
  431. raise merge_error(4)
  432. if cmpOC > 0: # insert committed
  433. if cmpCN > 0: # insert i_new first
  434. merge_output(i_new)
  435. else:
  436. merge_output(i_com)
  437. elif cmpON > 0: # insert i_new
  438. merge_output(i_new)
  439. else:
  440. raise merge_error(5) # both deleted same key
  441. while i_com.active and i_new.active: # new inserts
  442. cmpCN = compare(i_com.key, i_new.key)
  443. if cmpCN == 0:
  444. raise merge_error(6) # dueling insert
  445. if cmpCN > 0: # insert new
  446. merge_output(i_new)
  447. else: # insert committed
  448. merge_output(i_com)
  449. while i_old.active and i_com.active: # new deletes rest of original
  450. cmpOC = compare(i_old.key, i_com.key)
  451. if cmpOC > 0: # insert committed
  452. merge_output(i_com)
  453. elif cmpOC == 0 and (i_old.value == i_com.value): # del in new
  454. i_old.advance()
  455. i_com.advance()
  456. else: # dueling deletes or delete and change
  457. raise merge_error(7)
  458. while i_old.active and i_new.active:
  459. # committed deletes rest of original
  460. cmpON = compare(i_old.key, i_new.key)
  461. if cmpON > 0: # insert new
  462. merge_output(i_new)
  463. elif cmpON == 0 and (i_old.value == i_new.value):
  464. # deleted in committed
  465. i_old.advance()
  466. i_new.advance()
  467. else: # dueling deletes or delete and change
  468. raise merge_error(8)
  469. if i_old.active: # dueling deletes
  470. raise merge_error(9)
  471. while i_com.active:
  472. merge_output(i_com)
  473. while i_new.active:
  474. merge_output(i_new)
  475. if len(result._keys) == 0: #pragma: no cover
  476. # If the output bucket is empty, conflict resolution doesn't have
  477. # enough info to unlink it from its containing BTree correctly.
  478. #
  479. # XXX TS, 2012-11-16: I don't think this is possible
  480. #
  481. raise merge_error(10)
  482. result._next = b_old._next
  483. return result.__getstate__()
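# Illustrative three-way merge performed above: if the shared original state
# maps {'a': 1}, the committed state adds 'b': 2 and the state being committed
# adds 'c': 3, the independent inserts merge into {'a': 1, 'b': 2, 'c': 3};
# if both sides changed the value for the same key, merge_error(1) raises
# BTreesConflictError instead.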
  484. def __repr__(self):
  485. return self._repr_helper(self.items())
  486. class Set(_BucketBase):
  487. __slots__ = ()
  488. def add(self, key):
  489. return self._set(self._to_key(key))[0]
  490. insert = add
  491. def remove(self, key):
  492. self._del(self._to_key(key))
  493. def update(self, items):
  494. add = self.add
  495. for i in items:
  496. add(i)
  497. def __getstate__(self):
  498. data = tuple(self._keys)
  499. if self._next is not None:
  500. return data, self._next
  501. return (data, )
  502. def __setstate__(self, state):
  503. if not isinstance(state[0], tuple):
  504. raise TypeError('tuple required for first state element')
  505. self.clear()
  506. if len(state) == 2:
  507. state, self._next = state
  508. else:
  509. self._next = None
  510. state = state[0]
  511. self._keys.extend(state)
  512. def _set(self, key, value=None, ifunset=False):
  513. index = self._search(key)
  514. if index < 0:
  515. index = -index - 1
  516. self._p_changed = True
  517. self._keys.insert(index, key)
  518. return True, None
  519. return False, None
  520. def _del(self, key):
  521. index = self._search(key)
  522. if index >= 0:
  523. self._p_changed = True
  524. del self._keys[index]
  525. return 0, 0
  526. raise KeyError(key)
  527. def __getitem__(self, i):
  528. return self._keys[i]
  529. def _split(self, index=-1):
  530. if index < 0 or index >= len(self._keys):
  531. index = len(self._keys) // 2
  532. new_instance = type(self)()
  533. new_instance._keys = self._keys[index:]
  534. del self._keys[index:]
  535. new_instance._next = self._next
  536. self._next = new_instance
  537. return new_instance
  538. def _p_resolveConflict(self, s_old, s_com, s_new):
  539. b_old = type(self)()
  540. if s_old is not None:
  541. b_old.__setstate__(s_old)
  542. b_com = type(self)()
  543. if s_com is not None:
  544. b_com.__setstate__(s_com)
  545. b_new = type(self)()
  546. if s_new is not None:
  547. b_new.__setstate__(s_new)
  548. if (b_com._next != b_old._next or
  549. b_new._next != b_old._next): # conflict: com or new changed _next
  550. raise BTreesConflictError(-1, -1, -1, 0)
  551. if not b_com or not b_new: # conflict: com or new empty
  552. raise BTreesConflictError(-1, -1, -1, 12)
  553. i_old = _SetIteration(b_old, True)
  554. i_com = _SetIteration(b_com, True)
  555. i_new = _SetIteration(b_new, True)
  556. def merge_error(reason):
  557. return BTreesConflictError(
  558. i_old.position, i_com.position, i_new.position, reason)
  559. result = type(self)()
  560. def merge_output(it):
  561. result._keys.append(it.key)
  562. it.advance()
  563. while i_old.active and i_com.active and i_new.active:
  564. cmpOC = compare(i_old.key, i_com.key)
  565. cmpON = compare(i_old.key, i_new.key)
  566. if cmpOC == 0:
  567. if cmpON == 0: # all match
  568. merge_output(i_old)
  569. i_com.advance()
  570. i_new.advance()
  571. elif cmpON > 0: # insert in new
  572. merge_output(i_new)
  573. else: # deleted new
  574. if i_new.position == 1:
  575. # Deleted the first item. This will modify the
  576. # parent node, so we don't know if merging will be
  577. # safe
  578. raise merge_error(13)
  579. i_old.advance()
  580. i_com.advance()
  581. elif cmpON == 0:
  582. if cmpOC > 0: # insert committed
  583. merge_output(i_com)
  584. else: # delete committed
  585. if i_com.position == 1:
  586. # Deleted the first item. This will modify the
  587. # parent node, so we don't know if merging will be
  588. # safe
  589. raise merge_error(13)
  590. i_old.advance()
  591. i_new.advance()
  592. else: # both com and new keys changed
  593. cmpCN = compare(i_com.key, i_new.key)
  594. if cmpCN == 0: # both inserted same key
  595. raise merge_error(4)
  596. if cmpOC > 0: # insert committed
  597. if cmpCN > 0: # insert i_new first
  598. merge_output(i_new)
  599. else:
  600. merge_output(i_com)
  601. elif cmpON > 0: # insert i_new
  602. merge_output(i_new)
  603. else: # both com and new deleted same key
  604. raise merge_error(5)
  605. while i_com.active and i_new.active: # new inserts
  606. cmpCN = compare(i_com.key, i_new.key)
  607. if cmpCN == 0: # dueling insert
  608. raise merge_error(6)
  609. if cmpCN > 0: # insert new
  610. merge_output(i_new)
  611. else: # insert committed
  612. merge_output(i_com)
  613. while i_old.active and i_com.active: # new deletes rest of original
  614. cmpOC = compare(i_old.key, i_com.key)
  615. if cmpOC > 0: # insert committed
  616. merge_output(i_com)
  617. elif cmpOC == 0: # del in new
  618. i_old.advance()
  619. i_com.advance()
  620. else: # dueling deletes or delete and change
  621. raise merge_error(7)
  622. while i_old.active and i_new.active:
  623. # committed deletes rest of original
  624. cmpON = compare(i_old.key, i_new.key)
  625. if cmpON > 0: # insert new
  626. merge_output(i_new)
  627. elif cmpON == 0: # deleted in committed
  628. i_old.advance()
  629. i_new.advance()
  630. else: # dueling deletes or delete and change
  631. raise merge_error(8)
  632. if i_old.active: # dueling deletes
  633. raise merge_error(9)
  634. while i_com.active:
  635. merge_output(i_com)
  636. while i_new.active:
  637. merge_output(i_new)
  638. if len(result._keys) == 0: #pragma: no cover
  639. # If the output bucket is empty, conflict resolution doesn't have
  640. # enough info to unlink it from its containing BTree correctly.
  641. #
  642. # XXX TS, 2012-11-16: I don't think this is possible
  643. #
  644. raise merge_error(10)
  645. result._next = b_old._next
  646. return result.__getstate__()
  647. def __repr__(self):
  648. return self._repr_helper(self._keys)
  649. class _TreeItem(object):
  650. __slots__ = ('key',
  651. 'child',
  652. )
  653. def __init__(self, key, child):
  654. self.key = key
  655. self.child = child
  656. class _Tree(_Base):
  657. __slots__ = ('_data',
  658. '_firstbucket',
  659. )
  660. def __new__(cls, *args):
  661. value = _Base.__new__(cls, *args)
  662. # Empty trees don't get their __setstate__ called upon
  663. # unpickling (or __init__, obviously), so clear() is never called
  664. # and _data and _firstbucket are never defined, unless we do it here.
  665. value._data = []
  666. value._firstbucket = None
  667. return value
  668. def setdefault(self, key, value):
  669. return self._set(self._to_key(key), self._to_value(value), True)[1]
  670. def pop(self, key, default=_marker):
  671. try:
  672. return self._del(self._to_key(key))[1]
  673. except KeyError:
  674. if default is _marker:
  675. raise
  676. return default
  677. def update(self, items):
  678. if hasattr(items, 'iteritems'):
  679. items = items.iteritems()
  680. elif hasattr(items, 'items'):
  681. items = items.items()
  682. set = self.__setitem__
  683. for i in items:
  684. set(*i)
  685. def __setitem__(self, key, value):
  686. _no_default_comparison(key)
  687. self._set(self._to_key(key), self._to_value(value))
  688. def __delitem__(self, key):
  689. self._del(self._to_key(key))
  690. def clear(self):
  691. if self._data:
  692. # In the case of __init__, this was already set by __new__
  693. self._data = []
  694. self._firstbucket = None
  695. def __nonzero__(self):
  696. return bool(self._data)
  697. __bool__ = __nonzero__ #Py3k rename
  698. def __len__(self):
  699. l = 0
  700. bucket = self._firstbucket
  701. while bucket is not None:
  702. l += len(bucket._keys)
  703. bucket = bucket._next
  704. return l
  705. @property
  706. def size(self):
  707. return len(self._data)
  708. def _search(self, key):
  709. data = self._data
  710. if data:
  711. lo = 0
  712. hi = len(data)
  713. i = hi // 2
  714. while i > lo:
  715. cmp_ = compare(data[i].key, key)
  716. if cmp_ < 0:
  717. lo = i
  718. elif cmp_ > 0:
  719. hi = i
  720. else:
  721. break
  722. i = (lo + hi) // 2
  723. return i
  724. return -1
  725. def _findbucket(self, key):
  726. index = self._search(key)
  727. if index >= 0:
  728. child = self._data[index].child
  729. if isinstance(child, self._bucket_type):
  730. return child
  731. return child._findbucket(key)
  732. def __contains__(self, key):
  733. try:
  734. tree_key = self._to_key(key)
  735. except TypeError:
  736. # Can't convert the key, so can't possibly be in the tree
  737. return False
  738. return key in (self._findbucket(tree_key) or ())
  739. def has_key(self, key):
  740. index = self._search(key)
  741. if index < 0:
  742. return False
  743. r = self._data[index].child.has_key(key)
  744. return r and r + 1
  745. def keys(self, min=_marker, max=_marker,
  746. excludemin=False, excludemax=False,
  747. itertype='iterkeys'):
  748. if not self._data:
  749. return ()
  750. if min is not _marker and min is not None:
  751. min = self._to_key(min)
  752. bucket = self._findbucket(min)
  753. else:
  754. bucket = self._firstbucket
  755. iterargs = min, max, excludemin, excludemax
  756. return _TreeItems(bucket, itertype, iterargs)
  757. def iterkeys(self, min=_marker, max=_marker,
  758. excludemin=False, excludemax=False):
  759. return iter(self.keys(min, max, excludemin, excludemax))
  760. def __iter__(self):
  761. return iter(self.keys())
  762. def minKey(self, min=_marker):
  763. if min is _marker or min is None:
  764. bucket = self._firstbucket
  765. else:
  766. min = self._to_key(min)
  767. bucket = self._findbucket(min)
  768. if bucket is not None:
  769. return bucket.minKey(min)
  770. raise ValueError('empty tree')
  771. def maxKey(self, max=_marker):
  772. data = self._data
  773. if not data:
  774. raise ValueError('empty tree')
  775. if max is _marker or max is None:
  776. return data[-1].child.maxKey()
  777. max = self._to_key(max)
  778. index = self._search(max)
  779. if index and compare(data[index].child.minKey(), max) > 0:
  780. index -= 1 #pragma: no cover no idea how to provoke this
  781. return data[index].child.maxKey(max)
  782. def _set(self, key, value=None, ifunset=False):
  783. if (self._p_jar is not None and
  784. self._p_oid is not None and
  785. self._p_serial is not None):
  786. self._p_jar.readCurrent(self)
  787. data = self._data
  788. if data:
  789. index = self._search(key)
  790. child = data[index].child
  791. else:
  792. index = 0
  793. child = self._bucket_type()
  794. self._firstbucket = child
  795. data.append(_TreeItem(None, child))
  796. result = child._set(key, value, ifunset)
  797. grew = result[0]
  798. if grew:
  799. if type(child) is type(self):
  800. max_size = self.max_internal_size
  801. else:
  802. max_size = self.max_leaf_size
  803. if child.size > max_size:
  804. self._grow(child, index)
  805. # If a BTree contains only a single bucket, BTree.__getstate__()
  806. # includes the bucket's entire state, and the bucket doesn't get
  807. # an oid of its own. So if we have a single oid-less bucket that
  808. # changed, it's *our* oid that should be marked as changed -- the
  809. # bucket doesn't have one.
  810. if (grew is not None and
  811. type(child) is self._bucket_type and
  812. len(data) == 1 and
  813. child._p_oid is None):
  814. self._p_changed = 1
  815. return result
  816. def _grow(self, child, index):
  817. self._p_changed = True
  818. new_child = child._split()
  819. self._data.insert(index+1, _TreeItem(new_child.minKey(), new_child))
  820. if len(self._data) >= self.max_internal_size * 2:
  821. self._split_root()
  822. def _split_root(self):
  823. child = type(self)()
  824. child._data = self._data
  825. child._firstbucket = self._firstbucket
  826. self._data = [_TreeItem(None, child)]
  827. self._grow(child, 0)
  828. def _split(self, index=None):
  829. data = self._data
  830. if index is None:
  831. index = len(data) // 2
  832. next = type(self)()
  833. next._data = data[index:]
  834. first = data[index]
  835. del data[index:]
  836. if len(data) == 0:
  837. self._firstbucket = None # lost our bucket, can't buy no beer
  838. if isinstance(first.child, type(self)):
  839. next._firstbucket = first.child._firstbucket
  840. else:
841. next._firstbucket = first.child
  842. return next
  843. def _del(self, key):
  844. if (self._p_jar is not None and
  845. self._p_oid is not None and
  846. self._p_serial is not None):
  847. self._p_jar.readCurrent(self)
  848. data = self._data
  849. if not data:
  850. raise KeyError(key)
  851. index = self._search(key)
  852. child = data[index].child
  853. removed_first_bucket, value = child._del(key)
  854. # See comment in _set about small trees
  855. if (len(data) == 1 and
  856. type(child) is self._bucket_type and
  857. child._p_oid is None):
  858. self._p_changed = True
  859. # fix up the node key, but not for the 0'th one.
  860. if index > 0 and child.size and compare(key, data[index].key) == 0:
  861. self._p_changed = True
  862. data[index].key = child.minKey()
  863. if removed_first_bucket:
  864. if index:
  865. data[index-1].child._deleteNextBucket()
  866. removed_first_bucket = False # clear flag
  867. else:
  868. self._firstbucket = child._firstbucket
  869. if not child.size:
  870. if type(child) is self._bucket_type:
  871. if index:
  872. data[index-1].child._deleteNextBucket()
  873. else:
  874. self._firstbucket = child._next
  875. removed_first_bucket = True
  876. del data[index]
  877. return removed_first_bucket, value
  878. def _deleteNextBucket(self):
  879. self._data[-1].child._deleteNextBucket()
  880. def __getstate__(self):
  881. data = self._data
  882. if not data:
  883. # Note: returning None here causes our __setstate__
  884. # to not be called on unpickling
  885. return None
  886. if (len(data) == 1 and
  887. type(data[0].child) is not type(self) and
  888. data[0].child._p_oid is None
  889. ):
  890. return ((data[0].child.__getstate__(), ), )
  891. sdata = []
  892. for item in data:
  893. if sdata:
  894. sdata.append(item.key)
  895. sdata.append(item.child)
  896. else:
  897. sdata.append(item.child)
  898. return tuple(sdata), self._firstbucket
  899. def __setstate__(self, state):
  900. if state and not isinstance(state[0], tuple):
  901. raise TypeError('tuple required for first state element')
  902. self.clear()
  903. if state is None:
  904. return
  905. if len(state) == 1:
  906. bucket = self._bucket_type()
  907. bucket.__setstate__(state[0][0])
  908. state = [bucket], bucket
  909. data, self._firstbucket = state
  910. data = list(reversed(data))
  911. self._data.append(_TreeItem(None, data.pop()))
  912. while data:
  913. key = data.pop()
  914. child = data.pop()
  915. self._data.append(_TreeItem(key, child))
  916. def _assert(self, condition, message):
  917. if not condition:
  918. raise AssertionError(message)
  919. def _check(self, nextbucket=None):
  920. data = self._data
  921. assert_ = self._assert
  922. if not data:
  923. assert_(self._firstbucket is None,
  924. "Empty BTree has non-NULL firstbucket")
  925. return
  926. assert_(self._firstbucket is not None,
  927. "Non-empty BTree has NULL firstbucket")
  928. child_class = type(data[0].child)
  929. for i in data:
  930. assert_(i.child is not None, "BTree has NULL child")
  931. assert_(type(i.child) is child_class,
  932. "BTree children have different types")
  933. assert_(i.child.size, "Bucket length < 1")
  934. if child_class is type(self):
  935. assert_(self._firstbucket is data[0].child._firstbucket,
  936. "BTree has firstbucket different than "
  937. "its first child's firstbucket")
  938. for i in range(len(data)-1):
  939. data[i].child._check(data[i+1].child._firstbucket)
  940. data[-1].child._check(nextbucket)
  941. elif child_class is self._bucket_type:
  942. assert_(self._firstbucket is data[0].child,
  943. "Bottom-level BTree node has inconsistent firstbucket "
  944. "belief")
  945. for i in range(len(data)-1):
  946. assert_(data[i].child._next is data[i+1].child,
  947. "Bucket next pointer is damaged")
  948. assert_(data[-1].child._next is nextbucket,
  949. "Bucket next pointer is damaged")
  950. else:
  951. assert_(False, "Incorrect child type")
  952. def _p_resolveConflict(self, old, com, new):
  953. s_old = _get_simple_btree_bucket_state(old)
  954. s_com = _get_simple_btree_bucket_state(com)
  955. s_new = _get_simple_btree_bucket_state(new)
  956. return ((
  957. self._bucket_type()._p_resolveConflict(s_old, s_com, s_new), ), )
  958. def __repr__(self):
  959. r = super(_Tree, self).__repr__()
  960. r = r.replace('Py', '')
  961. return r
  962. def _get_simple_btree_bucket_state(state):
  963. if state is None:
  964. return state
  965. if not isinstance(state, tuple):
  966. raise TypeError("_p_resolveConflict: expected tuple or None for state")
  967. if len(state) == 2: # non-degenerate BTree, can't resolve
  968. raise BTreesConflictError(-1, -1, -1, 11)
  969. # Peel away wrapper to get to only-bucket state.
  970. if len(state) != 1:
  971. raise TypeError("_p_resolveConflict: expected 1- or 2-tuple for state")
  972. state = state[0]
  973. if not isinstance(state, tuple) or len(state) != 1:
  974. raise TypeError("_p_resolveConflict: expected 1-tuple containing "
  975. "bucket state")
  976. state = state[0]
  977. if not isinstance(state, tuple):
  978. raise TypeError("_p_resolveConflict: expected tuple for bucket state")
  979. return state
  980. class _TreeItems(object):
  981. __slots__ = ('firstbucket',
  982. 'itertype',
  983. 'iterargs',
  984. 'index',
  985. 'it',
  986. 'v',
  987. '_len',
  988. )
  989. def __init__(self, firstbucket, itertype, iterargs):
  990. self.firstbucket = firstbucket
  991. self.itertype = itertype
  992. self.iterargs = iterargs
  993. self.index = -1
  994. self.it = iter(self)
  995. self.v = None
  996. self._len = None
  997. def __getitem__(self, i):
  998. if isinstance(i, slice):
  999. return list(self)[i]
  1000. if i < 0:
  1001. i = len(self) + i
  1002. if i < 0:
  1003. raise IndexError(i)
  1004. if i < self.index:
  1005. self.index = -1
  1006. self.it = iter(self)
  1007. while i > self.index:
  1008. try:
  1009. self.v = next(self.it)
  1010. except StopIteration:
  1011. raise IndexError(i)
  1012. else:
  1013. self.index += 1
  1014. return self.v
  1015. def __len__(self):
  1016. if self._len is None:
  1017. i = 0
  1018. for _ in self:
  1019. i += 1
  1020. self._len = i
  1021. return self._len
  1022. def __iter__(self):
  1023. bucket = self.firstbucket
  1024. itertype = self.itertype
  1025. iterargs = self.iterargs
  1026. done = 0
  1027. # Note that we don't mind if the first bucket yields no
  1028. # results due to an idiosyncrasy in how range searches are done.
  1029. while bucket is not None:
  1030. for k in getattr(bucket, itertype)(*iterargs):
  1031. yield k
  1032. done = 0
  1033. if done:
  1034. return
  1035. bucket = bucket._next
  1036. done = 1
  1037. class _TreeIterator(object):
  1038. """ Faux implementation for BBB only.
  1039. """
  1040. def __init__(self, items): #pragma: no cover
  1041. raise TypeError(
  1042. "TreeIterators are private implementation details "
  1043. "of the C-based BTrees.\n\n"
  1044. "Please use 'iter(tree)', rather than instantiating "
  1045. "one directly."
  1046. )
  1047. class Tree(_Tree):
  1048. __slots__ = ()
  1049. def get(self, key, default=None):
  1050. bucket = self._findbucket(key)
  1051. if bucket:
  1052. return bucket.get(key, default)
  1053. return default
  1054. def __getitem__(self, key):
  1055. bucket = self._findbucket(key)
  1056. if bucket:
  1057. return bucket[key]
  1058. raise KeyError(key)
  1059. def values(self, min=_marker, max=_marker,
  1060. excludemin=False, excludemax=False):
  1061. return self.keys(min, max, excludemin, excludemax, 'itervalues')
  1062. def itervalues(self, min=_marker, max=_marker,
  1063. excludemin=False, excludemax=False):
  1064. return iter(self.values(min, max, excludemin, excludemax))
  1065. def items(self, min=_marker, max=_marker,
  1066. excludemin=False, excludemax=False):
  1067. return self.keys(min, max, excludemin, excludemax, 'iteritems')
  1068. def iteritems(self, min=_marker, max=_marker,
  1069. excludemin=False, excludemax=False):
  1070. return iter(self.items(min, max, excludemin, excludemax))
  1071. def byValue(self, min):
  1072. return reversed(
  1073. sorted((v, k) for (k, v) in self.iteritems() if v >= min))
  1074. def insert(self, key, value):
  1075. return bool(self._set(key, value, True)[0])
  1076. class TreeSet(_Tree):
  1077. __slots__ = ()
  1078. def add(self, key):
  1079. return self._set(self._to_key(key))[0]
  1080. insert = add
  1081. def remove(self, key):
  1082. self._del(self._to_key(key))
  1083. def update(self, items):
  1084. add = self.add
  1085. for i in items:
  1086. add(i)
  1087. _p_resolveConflict = _Tree._p_resolveConflict
  1088. class set_operation(object):
  1089. __slots__ = ('func',
  1090. 'set_type',
  1091. )
  1092. def __init__(self, func, set_type):
  1093. self.func = func
  1094. self.set_type = set_type
  1095. def __call__(self, *a, **k):
  1096. return self.func(self.set_type, *a, **k)
  1097. def difference(set_type, o1, o2):
  1098. if o1 is None or o2 is None:
  1099. return o1
  1100. i1 = _SetIteration(o1, True, 0)
  1101. i2 = _SetIteration(o2, False, 0)
  1102. if i1.useValues:
  1103. result = o1._mapping_type()
  1104. def copy(i):
  1105. result._keys.append(i.key)
  1106. result._values.append(i.value)
  1107. else:
  1108. result = o1._set_type()
  1109. def copy(i):
  1110. result._keys.append(i.key)
  1111. while i1.active and i2.active:
  1112. cmp_ = compare(i1.key, i2.key)
  1113. if cmp_ < 0:
  1114. copy(i1)
  1115. i1.advance()
  1116. elif cmp_ == 0:
  1117. i1.advance()
  1118. i2.advance()
  1119. else:
  1120. i2.advance()
  1121. while i1.active:
  1122. copy(i1)
  1123. i1.advance()
  1124. return result
  1125. def union(set_type, o1, o2):
  1126. if o1 is None:
  1127. return o2
  1128. if o2 is None:
  1129. return o1
  1130. i1 = _SetIteration(o1, False, 0)
  1131. i2 = _SetIteration(o2, False, 0)
  1132. result = o1._set_type()
  1133. def copy(i):
  1134. result._keys.append(i.key)
  1135. while i1.active and i2.active:
  1136. cmp_ = compare(i1.key, i2.key)
  1137. if cmp_ < 0:
  1138. copy(i1)
  1139. i1.advance()
  1140. elif cmp_ == 0:
  1141. copy(i1)
  1142. i1.advance()
  1143. i2.advance()
  1144. else:
  1145. copy(i2)
  1146. i2.advance()
  1147. while i1.active:
  1148. copy(i1)
  1149. i1.advance()
  1150. while i2.active:
  1151. copy(i2)
  1152. i2.advance()
  1153. return result
  1154. def intersection(set_type, o1, o2):
  1155. if o1 is None:
  1156. return o2
  1157. if o2 is None:
  1158. return o1
  1159. i1 = _SetIteration(o1, False, 0)
  1160. i2 = _SetIteration(o2, False, 0)
  1161. result = o1._set_type()
  1162. def copy(i):
  1163. result._keys.append(i.key)
  1164. while i1.active and i2.active:
  1165. cmp_ = compare(i1.key, i2.key)
  1166. if cmp_ < 0:
  1167. i1.advance()
  1168. elif cmp_ == 0:
  1169. copy(i1)
  1170. i1.advance()
  1171. i2.advance()
  1172. else:
  1173. i2.advance()
  1174. return result
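# Example of the three set operations above on key sets: with o1 containing
# ['a', 'b', 'c'] and o2 containing ['b', 'd'], difference yields ['a', 'c'],
# union yields ['a', 'b', 'c', 'd'], and intersection yields ['b'].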
  1175. def _prepMergeIterators(o1, o2):
  1176. MERGE_DEFAULT = getattr(o1, 'MERGE_DEFAULT', None)
  1177. if MERGE_DEFAULT is None:
  1178. raise TypeError("invalid set operation")
  1179. i1 = _SetIteration(o1, True, MERGE_DEFAULT)
  1180. i2 = _SetIteration(o2, True, MERGE_DEFAULT)
  1181. return i1, i2
  1182. def weightedUnion(set_type, o1, o2, w1=1, w2=1):
  1183. if o1 is None:
  1184. if o2 is None:
  1185. return 0, None
  1186. return w2, o2
  1187. if o2 is None:
  1188. return w1, o1
  1189. i1, i2 = _prepMergeIterators(o1, o2)
  1190. MERGE = getattr(o1, 'MERGE', None)
  1191. if MERGE is None and i1.useValues and i2.useValues:
  1192. raise TypeError("invalid set operation")
  1193. MERGE_WEIGHT = getattr(o1, 'MERGE_WEIGHT')
  1194. if (not i1.useValues) and i2.useValues:
  1195. i1, i2 = i2, i1
  1196. w1, w2 = w2, w1
  1197. _merging = i1.useValues or i2.useValues
  1198. if _merging:
  1199. result = o1._mapping_type()
  1200. def copy(i, w):
  1201. result._keys.append(i.key)
  1202. result._values.append(MERGE_WEIGHT(i.value, w))
  1203. else:
  1204. result = o1._set_type()
  1205. def copy(i, w):
  1206. result._keys.append(i.key)
  1207. while i1.active and i2.active:
  1208. cmp_ = compare(i1.key, i2.key)
  1209. if cmp_ < 0:
  1210. copy(i1, w1)
  1211. i1.advance()
  1212. elif cmp_ == 0:
  1213. result._keys.append(i1.key)
  1214. if _merging:
  1215. result._values.append(MERGE(i1.value, w1, i2.value, w2))
  1216. i1.advance()
  1217. i2.advance()
  1218. else:
  1219. copy(i2, w2)
  1220. i2.advance()
  1221. while i1.active:
  1222. copy(i1, w1)
  1223. i1.advance()
  1224. while i2.active:
  1225. copy(i2, w2)
  1226. i2.advance()
  1227. return 1, result
  1228. def weightedIntersection(set_type, o1, o2, w1=1, w2=1):
  1229. if o1 is None:
  1230. if o2 is None:
  1231. return 0, None
  1232. return w2, o2
  1233. if o2 is None:
  1234. return w1, o1
  1235. i1, i2 = _prepMergeIterators(o1, o2)
  1236. MERGE = getattr(o1, 'MERGE', None)
  1237. if MERGE is None and i1.useValues and i2.useValues:
  1238. raise TypeError("invalid set operation")
  1239. if (not i1.useValues) and i2.useValues:
  1240. i1, i2 = i2, i1
  1241. w1, w2 = w2, w1
  1242. _merging = i1.useValues or i2.useValues
  1243. if _merging:
  1244. result = o1._mapping_type()
  1245. else:
  1246. result = o1._set_type()
  1247. while i1.active and i2.active:
  1248. cmp_ = compare(i1.key, i2.key)
  1249. if cmp_ < 0:
  1250. i1.advance()
  1251. elif cmp_ == 0:
  1252. result._keys.append(i1.key)
  1253. if _merging:
  1254. result._values.append(MERGE(i1.value, w1, i2.value, w2))
  1255. i1.advance()
  1256. i2.advance()
  1257. else:
  1258. i2.advance()
  1259. if isinstance(result, (Set, TreeSet)):
  1260. return w1 + w2, result
  1261. return 1, result
  1262. def multiunion(set_type, seqs):
  1263. # XXX simple/slow implementation. Goal is just to get tests to pass.
  1264. result = set_type()
  1265. for s in seqs:
  1266. try:
  1267. iter(s)
  1268. except TypeError:
  1269. s = set_type((s, ))
  1270. result.update(s)
  1271. return result
  1272. def to_ob(self, v):
  1273. return v
  1274. def _packer_unpacker(struct_format):
  1275. s = Struct(struct_format)
  1276. return s.pack, s.unpack
  1277. int_pack, int_unpack = _packer_unpacker('i')
  1278. def to_int(self, v):
  1279. try:
  1280. int_pack(index(v))
  1281. except (struct_error, TypeError):
  1282. raise TypeError('32-bit integer expected')
  1283. return int(v)
  1284. float_pack = _packer_unpacker('f')[0]
  1285. def to_float(self, v):
  1286. try:
  1287. float_pack(v)
  1288. except struct_error:
  1289. raise TypeError('float expected')
  1290. return float(v)
  1291. long_pack, long_unpack = _packer_unpacker('q')
  1292. def to_long(self, v):
  1293. try:
  1294. long_pack(index(v))
  1295. except (struct_error, TypeError):
  1296. if isinstance(v, int_types):
  1297. raise ValueError("Value out of range", v)
  1298. raise TypeError('64-bit integer expected')
  1299. return int(v)
  1300. def to_bytes(l):
  1301. def to(self, v):
  1302. if not (isinstance(v, bytes) and len(v) == l):
  1303. raise TypeError("%s-byte array expected" % l)
  1304. return v
  1305. return to
  1306. tos = dict(I=to_int, L=to_long, F=to_float, O=to_ob)
  1307. MERGE_DEFAULT_int = 1
  1308. MERGE_DEFAULT_float = 1.0
  1309. def MERGE(self, value1, weight1, value2, weight2):
  1310. return (value1 * weight1) + (value2 * weight2)
  1311. def MERGE_WEIGHT_default(self, value, weight):
  1312. return value
  1313. def MERGE_WEIGHT_numeric(self, value, weight):
  1314. return value * weight
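# Example of how weightedUnion applies these helpers for an integer-valued
# flavor that defines MERGE: merging buckets {'a': 2} and {'a': 3} with
# weights 2 and 1 stores MERGE(2, 2, 3, 1) == 2*2 + 3*1 == 7 for the shared
# key, while a key present on only one side is copied through MERGE_WEIGHT
# (identity for object values, value * weight for the numeric flavors).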
  1315. def _fix_pickle(mod_dict, mod_name):
  1316. # Make the pure-Python objects pickle with the same
  1317. # class names and types as the C extensions by setting the appropriate
  1318. # _BTree_reduce_as attribute.
  1319. # If the C extensions are not available, we also change the
  1320. # __name__ attribute of the type to match the C name (otherwise
  1321. # we wind up with *Py in the pickles)
  1322. # Each module must call this as `_fix_pickle(globals(), __name__)`
  1323. # at the bottom.
  1324. mod_prefix = mod_name.split('.')[-1][:2] # BTrees.OOBTree -> 'OO'
  1325. bucket_name = mod_prefix + 'Bucket'
  1326. py_bucket_name = bucket_name + 'Py'
  1327. have_c_extensions = mod_dict[bucket_name] is not mod_dict[py_bucket_name]
  1328. for name in 'Bucket', 'Set', 'BTree', 'TreeSet', 'TreeIterator':
  1329. raw_name = mod_prefix + name
  1330. py_name = raw_name + 'Py'
  1331. try:
  1332. py_type = mod_dict[py_name]
  1333. except KeyError:
  1334. if name == 'TreeIterator':
  1335. # Optional
  1336. continue
  1337. raise # pragma: no cover
  1338. raw_type = mod_dict[raw_name] # Could be C or Python
  1339. py_type._BTree_reduce_as = raw_type
  1340. py_type._BTree_reduce_up_bound = py_type
  1341. if not have_c_extensions: # pragma: no cover
  1342. # Set FooPy to have the __name__ of simply Foo.
  1343. # We can't do this if the C extension is available,
  1344. # because then mod_dict[FooPy.__name__] is not FooPy
  1345. # and pickle refuses to save something like that.
  1346. # On the other hand (no C extension) this makes our
  1347. # Python pickle match the C version by default
  1348. py_type.__name__ = raw_name
  1349. py_type.__qualname__ = raw_name # Py 3.3+
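# A minimal usage sketch for the machinery defined here, assuming a concrete
# flavor such as BTrees.OOBTree built from these base classes:
#
#     from BTrees.OOBTree import OOBTree
#     t = OOBTree()
#     t.update({'apple': 1, 'pear': 2, 'plum': 3})
#     list(t.keys('pe', 'pz'))   # ['pear', 'plum'] (range search)
#     t.setdefault('apple', 99)  # 1 (existing value is kept)
#     'pear' in t                # True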