Initial commit of OpenSPARC T2 architecture model (commit 920dae64).
[OpenSPARC-T2-SAM] / sam-t2 / devtools / amd64 / lib / python2.4 / pickle.py
1"""Create portable serialized representations of Python objects.
2
3See module cPickle for a (much) faster implementation.
4See module copy_reg for a mechanism for registering custom picklers.
5See module pickletools source for extensive comments.
6
7Classes:
8
9 Pickler
10 Unpickler
11
12Functions:
13
14 dump(object, file)
15 dumps(object) -> string
16 load(file) -> object
17 loads(string) -> object
18
19Misc variables:
20
21 __version__
22 format_version
23 compatible_formats
24
25"""
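
# Illustrative sketch: dumps()/loads() round-trip an object through a string,
# while dump()/load() do the same through a file-like object.
#
#     >>> import pickle
#     >>> data = {'a': 1, 'b': [2, 3]}
#     >>> s = pickle.dumps(data)               # protocol 0 (text) by default
#     >>> pickle.loads(s) == data
#     True
#     >>> from cStringIO import StringIO
#     >>> f = StringIO()
#     >>> pickle.dump(data, f, 2)              # binary protocols need a binary-safe file
#     >>> f.seek(0); pickle.load(f) == data
#     True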
26
27__version__ = "$Revision: 1.158 $" # Code version
28
29from types import *
30from copy_reg import dispatch_table
31from copy_reg import _extension_registry, _inverted_registry, _extension_cache
32import marshal
33import sys
34import struct
35import re
36import warnings
37
38__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
39 "Unpickler", "dump", "dumps", "load", "loads"]
40
41# These are purely informational; no code uses these.
42format_version = "2.0" # File format version we write
43compatible_formats = ["1.0", # Original protocol 0
44 "1.1", # Protocol 0 with INST added
45 "1.2", # Original protocol 1
46 "1.3", # Protocol 1 with BINFLOAT added
47 "2.0", # Protocol 2
48 ] # Old format versions we can read
49
50# Keep in synch with cPickle. This is the highest protocol number we
51# know how to read.
52HIGHEST_PROTOCOL = 2
53
54# Why use struct.pack() for pickling but marshal.loads() for
55# unpickling? struct.pack() is 40% faster than marshal.dumps(), but
56# marshal.loads() is twice as fast as struct.unpack()!
57mloads = marshal.loads
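
# Illustrative sketch: the 'i' marshal type code followed by four little-endian
# bytes decodes to a plain int, which is why the Unpickler below feeds
# mloads('i' + self.read(4)) and friends.
#
#     >>> import marshal, struct
#     >>> marshal.loads('i' + struct.pack('<i', 255))
#     255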
58
59class PickleError(Exception):
60 """A common base class for the other pickling exceptions."""
61 pass
62
63class PicklingError(PickleError):
64 """This exception is raised when an unpicklable object is passed to the
65 dump() method.
66
67 """
68 pass
69
70class UnpicklingError(PickleError):
71 """This exception is raised when there is a problem unpickling an object,
72 such as a security violation.
73
74 Note that other exceptions may also be raised during unpickling, including
75 (but not necessarily limited to) AttributeError, EOFError, ImportError,
76 and IndexError.
77
78 """
79 pass
80
81# An instance of _Stop is raised by Unpickler.load_stop() in response to
82# the STOP opcode, passing the object that is the result of unpickling.
83class _Stop(Exception):
84 def __init__(self, value):
85 self.value = value
86
87# Jython has PyStringMap; it's a dict subclass with string keys
88try:
89 from org.python.core import PyStringMap
90except ImportError:
91 PyStringMap = None
92
93# UnicodeType may or may not be exported (normally imported from types)
94try:
95 UnicodeType
96except NameError:
97 UnicodeType = None
98
99# Pickle opcodes. See pickletools.py for extensive docs. The listing
100# here is in kind-of alphabetical order of 1-character pickle code.
101# pickletools groups them by purpose.
102
103MARK = '(' # push special markobject on stack
104STOP = '.' # every pickle ends with STOP
105POP = '0' # discard topmost stack item
106POP_MARK = '1' # discard stack top through topmost markobject
107DUP = '2' # duplicate top stack item
108FLOAT = 'F' # push float object; decimal string argument
109INT = 'I' # push integer or bool; decimal string argument
110BININT = 'J' # push four-byte signed int
111BININT1 = 'K' # push 1-byte unsigned int
112LONG = 'L' # push long; decimal string argument
113BININT2 = 'M' # push 2-byte unsigned int
114NONE = 'N' # push None
115PERSID = 'P' # push persistent object; id is taken from string arg
116BINPERSID = 'Q' # " " " ; " " " " stack
117REDUCE = 'R' # apply callable to argtuple, both on stack
118STRING = 'S' # push string; NL-terminated string argument
119BINSTRING = 'T' # push string; counted binary string argument
120SHORT_BINSTRING = 'U' # " " ; " " " " < 256 bytes
121UNICODE = 'V' # push Unicode string; raw-unicode-escaped'd argument
122BINUNICODE = 'X' # " " " ; counted UTF-8 string argument
123APPEND = 'a' # append stack top to list below it
124BUILD = 'b' # call __setstate__ or __dict__.update()
125GLOBAL = 'c' # push self.find_class(modname, name); 2 string args
126DICT = 'd' # build a dict from stack items
127EMPTY_DICT = '}' # push empty dict
128APPENDS = 'e' # extend list on stack by topmost stack slice
129GET = 'g' # push item from memo on stack; index is string arg
130BINGET = 'h' # " " " " " " ; " " 1-byte arg
131INST = 'i' # build & push class instance
132LONG_BINGET = 'j' # push item from memo on stack; index is 4-byte arg
133LIST = 'l' # build list from topmost stack items
134EMPTY_LIST = ']' # push empty list
135OBJ = 'o' # build & push class instance
136PUT = 'p' # store stack top in memo; index is string arg
137BINPUT = 'q' # " " " " " ; " " 1-byte arg
138LONG_BINPUT = 'r' # " " " " " ; " " 4-byte arg
139SETITEM = 's' # add key+value pair to dict
140TUPLE = 't' # build tuple from topmost stack items
141EMPTY_TUPLE = ')' # push empty tuple
142SETITEMS = 'u' # modify dict by adding topmost key+value pairs
143BINFLOAT = 'G' # push float; arg is 8-byte float encoding
144
145TRUE = 'I01\n' # not an opcode; see INT docs in pickletools.py
146FALSE = 'I00\n' # not an opcode; see INT docs in pickletools.py
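
# Illustrative sketch: with the default text protocol the opcodes above appear
# directly in the output stream.
#
#     >>> import pickle
#     >>> pickle.dumps(1)           # INT opcode, decimal argument, then STOP
#     'I1\n.'
#     >>> pickle.dumps(True)        # booleans reuse INT via the TRUE constant
#     'I01\n.'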
147
148# Protocol 2
149
150PROTO = '\x80' # identify pickle protocol
151NEWOBJ = '\x81' # build object by applying cls.__new__ to argtuple
152EXT1 = '\x82' # push object from extension registry; 1-byte index
153EXT2 = '\x83' # ditto, but 2-byte index
154EXT4 = '\x84' # ditto, but 4-byte index
155TUPLE1 = '\x85' # build 1-tuple from stack top
156TUPLE2 = '\x86' # build 2-tuple from two topmost stack items
157TUPLE3 = '\x87' # build 3-tuple from three topmost stack items
158NEWTRUE = '\x88' # push True
159NEWFALSE = '\x89' # push False
160LONG1 = '\x8a' # push long from < 256 bytes
161LONG4 = '\x8b' # push really big long
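
# Illustrative sketch: protocol 2 streams start with the PROTO opcode and a
# one-byte version number; a bare True then needs only NEWTRUE and STOP.
#
#     >>> import pickle
#     >>> pickle.dumps(True, 2)
#     '\x80\x02\x88.'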
162
163_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
164
165
166__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
167del x
168
169
170# Pickling machinery
171
172class Pickler:
173
174 def __init__(self, file, protocol=None, bin=None):
175 """This takes a file-like object for writing a pickle data stream.
176
177 The optional protocol argument tells the pickler to use the
178 given protocol; supported protocols are 0, 1, 2. The default
179 protocol is 0, to be backwards compatible. (Protocol 0 is the
180 only protocol that can be written to a file opened in text
181 mode and read back successfully. When using a protocol higher
182 than 0, make sure the file is opened in binary mode, both when
183 pickling and unpickling.)
184
185 Protocol 1 is more efficient than protocol 0; protocol 2 is
186 more efficient than protocol 1.
187
188 Specifying a negative protocol version selects the highest
189 protocol version supported. The higher the protocol used, the
190 more recent the version of Python needed to read the pickle
191 produced.
192
193 The file parameter must have a write() method that accepts a single
194 string argument. It can thus be an open file object, a StringIO
195 object, or any other custom object that meets this interface.
196
197 """
198 if protocol is not None and bin is not None:
199 raise ValueError, "can't specify both 'protocol' and 'bin'"
200 if bin is not None:
201 warnings.warn("The 'bin' argument to Pickler() is deprecated",
202 DeprecationWarning)
203 protocol = bin
204 if protocol is None:
205 protocol = 0
206 if protocol < 0:
207 protocol = HIGHEST_PROTOCOL
208 elif not 0 <= protocol <= HIGHEST_PROTOCOL:
209 raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
210 self.write = file.write
211 self.memo = {}
212 self.proto = int(protocol)
213 self.bin = protocol >= 1
214 self.fast = 0
215
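    # Illustrative sketch: a Pickler can write to any object with a write()
    # method, and protocol=-1 selects the highest protocol this module knows.
    #
    #     >>> from cStringIO import StringIO
    #     >>> f = StringIO()
    #     >>> p = Pickler(f, -1)
    #     >>> p.proto == HIGHEST_PROTOCOL
    #     True
    #     >>> p.dump([1, 2, 3])
    #     >>> loads(f.getvalue())
    #     [1, 2, 3]
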
216 def clear_memo(self):
217 """Clears the pickler's "memo".
218
219 The memo is the data structure that remembers which objects the
220 pickler has already seen, so that shared or recursive objects are
221 pickled by reference and not by value. This method is useful when
222 re-using picklers.
223
224 """
225 self.memo.clear()
226
227 def dump(self, obj):
228 """Write a pickled representation of obj to the open file."""
229 if self.proto >= 2:
230 self.write(PROTO + chr(self.proto))
231 self.save(obj)
232 self.write(STOP)
233
234 def memoize(self, obj):
235 """Store an object in the memo."""
236
237 # The Pickler memo is a dictionary mapping object ids to 2-tuples
238 # that contain the Unpickler memo key and the object being memoized.
239 # The memo key is written to the pickle and will become
240 # the key in the Unpickler's memo. The object is stored in the
241 # Pickler memo so that transient objects are kept alive during
242 # pickling.
243
244 # The use of the Unpickler memo length as the memo key is just a
245 # convention. The only requirement is that the memo values be unique.
246 # But there appears to be no advantage to any other scheme, and this
247 # scheme allows the Unpickler memo to be implemented as a plain (but
248 # growable) array, indexed by memo key.
249 if self.fast:
250 return
251 assert id(obj) not in self.memo
252 memo_len = len(self.memo)
253 self.write(self.put(memo_len))
254 self.memo[id(obj)] = memo_len, obj
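
    # Illustrative sketch: because of the memo, an object reached twice is
    # written once and then referenced, so sharing survives a round-trip.
    #
    #     >>> shared = ['only pickled once']
    #     >>> a, b = loads(dumps([shared, shared]))
    #     >>> a is b
    #     True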
255
256 # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
257 def put(self, i, pack=struct.pack):
258 if self.bin:
259 if i < 256:
260 return BINPUT + chr(i)
261 else:
262 return LONG_BINPUT + pack("<i", i)
263
264 return PUT + repr(i) + '\n'
265
266 # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
267 def get(self, i, pack=struct.pack):
268 if self.bin:
269 if i < 256:
270 return BINGET + chr(i)
271 else:
272 return LONG_BINGET + pack("<i", i)
273
274 return GET + repr(i) + '\n'
275
276 def save(self, obj):
277 # Check for persistent id (defined by a subclass)
278 pid = self.persistent_id(obj)
279 if pid:
280 self.save_pers(pid)
281 return
282
283 # Check the memo
284 x = self.memo.get(id(obj))
285 if x:
286 self.write(self.get(x[0]))
287 return
288
289 # Check the type dispatch table
290 t = type(obj)
291 f = self.dispatch.get(t)
292 if f:
293 f(self, obj) # Call unbound method with explicit self
294 return
295
296 # Check for a class with a custom metaclass; treat as regular class
297 try:
298 issc = issubclass(t, TypeType)
299 except TypeError: # t is not a class (old Boost; see SF #502085)
300 issc = 0
301 if issc:
302 self.save_global(obj)
303 return
304
305 # Check copy_reg.dispatch_table
306 reduce = dispatch_table.get(t)
307 if reduce:
308 rv = reduce(obj)
309 else:
310 # Check for a __reduce_ex__ method, fall back to __reduce__
311 reduce = getattr(obj, "__reduce_ex__", None)
312 if reduce:
313 rv = reduce(self.proto)
314 else:
315 reduce = getattr(obj, "__reduce__", None)
316 if reduce:
317 rv = reduce()
318 else:
319 raise PicklingError("Can't pickle %r object: %r" %
320 (t.__name__, obj))
321
322 # Check for string returned by reduce(), meaning "save as global"
323 if type(rv) is StringType:
324 self.save_global(obj, rv)
325 return
326
327 # Assert that reduce() returned a tuple
328 if type(rv) is not TupleType:
329 raise PicklingError("%s must return string or tuple" % reduce)
330
331 # Assert that it returned an appropriately sized tuple
332 l = len(rv)
333 if not (2 <= l <= 5):
334 raise PicklingError("Tuple returned by %s must have "
335 "two to five elements" % reduce)
336
337 # Save the reduce() output and finally memoize the object
338 self.save_reduce(obj=obj, *rv)
339
340 def persistent_id(self, obj):
341 # This exists so a subclass can override it
342 return None
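
    # Hypothetical sketch: a subclass can return a string id here for objects
    # that should be stored out of band; the matching Unpickler subclass must
    # then supply persistent_load(pid) to map the id back to an object.
    # DBPickler and DBRecord below are assumed names, not part of this module.
    #
    #     class DBPickler(Pickler):
    #         def persistent_id(self, obj):
    #             if isinstance(obj, DBRecord):
    #                 return "record:%d" % obj.key
    #             return None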
343
344 def save_pers(self, pid):
345 # Save a persistent id reference
346 if self.bin:
347 self.save(pid)
348 self.write(BINPERSID)
349 else:
350 self.write(PERSID + str(pid) + '\n')
351
352 def save_reduce(self, func, args, state=None,
353 listitems=None, dictitems=None, obj=None):
354 # This API is called by some subclasses
355
356 # Assert that args is a tuple or None
357 if not isinstance(args, TupleType):
358 if args is None:
359 # A hack for Jim Fulton's ExtensionClass, now deprecated.
360 # See load_reduce()
361 warnings.warn("__basicnew__ special case is deprecated",
362 DeprecationWarning)
363 else:
364 raise PicklingError(
365 "args from reduce() should be a tuple")
366
367 # Assert that func is callable
368 if not callable(func):
369 raise PicklingError("func from reduce should be callable")
370
371 save = self.save
372 write = self.write
373
374 # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
375 if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
376 # A __reduce__ implementation can direct protocol 2 to
377 # use the more efficient NEWOBJ opcode, while still
378 # allowing protocol 0 and 1 to work normally. For this to
379 # work, the function returned by __reduce__ should be
380 # called __newobj__, and its first argument should be a
381 # new-style class. The implementation for __newobj__
382 # should be as follows, although pickle has no way to
383 # verify this:
384 #
385 # def __newobj__(cls, *args):
386 # return cls.__new__(cls, *args)
387 #
388 # Protocols 0 and 1 will pickle a reference to __newobj__,
389 # while protocol 2 (and above) will pickle a reference to
390 # cls, the remaining args tuple, and the NEWOBJ code,
391 # which calls cls.__new__(cls, *args) at unpickling time
392 # (see load_newobj below). If __reduce__ returns a
393 # three-tuple, the state from the third tuple item will be
394 # pickled regardless of the protocol, calling __setstate__
395 # at unpickling time (see load_build below).
396 #
397 # Note that no standard __newobj__ implementation exists;
398 # you have to provide your own. This is to enforce
399 # compatibility with Python 2.2 (pickles written using
400 # protocol 0 or 1 in Python 2.3 should be unpicklable by
401 # Python 2.2).
402 cls = args[0]
403 if not hasattr(cls, "__new__"):
404 raise PicklingError(
405 "args[0] from __newobj__ args has no __new__")
406 if obj is not None and cls is not obj.__class__:
407 raise PicklingError(
408 "args[0] from __newobj__ args has the wrong class")
409 args = args[1:]
410 save(cls)
411 save(args)
412 write(NEWOBJ)
413 else:
414 save(func)
415 save(args)
416 write(REDUCE)
417
418 if obj is not None:
419 self.memoize(obj)
420
421 # More new special cases (that work with older protocols as
422 # well): when __reduce__ returns a tuple with 4 or 5 items,
423 # the 4th and 5th item should be iterators that provide list
424 # items and dict items (as (key, value) tuples), or None.
425
426 if listitems is not None:
427 self._batch_appends(listitems)
428
429 if dictitems is not None:
430 self._batch_setitems(dictitems)
431
432 if state is not None:
433 save(state)
434 write(BUILD)
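
    # Illustrative sketch: a typical __reduce__ returns a (callable, args)
    # pair; save_reduce() pickles the callable and the argument tuple, and the
    # REDUCE opcode re-applies them at load time.  The Point class below is
    # hypothetical and must live in an importable module so the callable
    # itself can be pickled by save_global().
    #
    #     class Point(object):
    #         def __init__(self, x, y):
    #             self.x, self.y = x, y
    #         def __reduce__(self):
    #             return (Point, (self.x, self.y))
    #
    #     # loads(dumps(Point(1, 2))) --> a new Point with x=1, y=2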
435
436 # Methods below this point are dispatched through the dispatch table
437
438 dispatch = {}
439
440 def save_none(self, obj):
441 self.write(NONE)
442 dispatch[NoneType] = save_none
443
444 def save_bool(self, obj):
445 if self.proto >= 2:
446 self.write(obj and NEWTRUE or NEWFALSE)
447 else:
448 self.write(obj and TRUE or FALSE)
449 dispatch[bool] = save_bool
450
451 def save_int(self, obj, pack=struct.pack):
452 if self.bin:
453 # If the int is small enough to fit in a signed 4-byte 2's-comp
454 # format, we can store it more efficiently than the general
455 # case.
456 # First one- and two-byte unsigned ints:
457 if obj >= 0:
458 if obj <= 0xff:
459 self.write(BININT1 + chr(obj))
460 return
461 if obj <= 0xffff:
462 self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
463 return
464 # Next check for 4-byte signed ints:
465 high_bits = obj >> 31 # note that Python shift sign-extends
466 if high_bits == 0 or high_bits == -1:
467 # All high bits are copies of bit 2**31, so the value
468 # fits in a 4-byte signed int.
469 self.write(BININT + pack("<i", obj))
470 return
471 # Text pickle, or int too big to fit in signed 4-byte format.
472 self.write(INT + repr(obj) + '\n')
473 dispatch[IntType] = save_int
474
475 def save_long(self, obj, pack=struct.pack):
476 if self.proto >= 2:
477 bytes = encode_long(obj)
478 n = len(bytes)
479 if n < 256:
480 self.write(LONG1 + chr(n) + bytes)
481 else:
482 self.write(LONG4 + pack("<i", n) + bytes)
483 return
484 self.write(LONG + repr(obj) + '\n')
485 dispatch[LongType] = save_long
486
487 def save_float(self, obj, pack=struct.pack):
488 if self.bin:
489 self.write(BINFLOAT + pack('>d', obj))
490 else:
491 self.write(FLOAT + repr(obj) + '\n')
492 dispatch[FloatType] = save_float
493
494 def save_string(self, obj, pack=struct.pack):
495 if self.bin:
496 n = len(obj)
497 if n < 256:
498 self.write(SHORT_BINSTRING + chr(n) + obj)
499 else:
500 self.write(BINSTRING + pack("<i", n) + obj)
501 else:
502 self.write(STRING + repr(obj) + '\n')
503 self.memoize(obj)
504 dispatch[StringType] = save_string
505
506 def save_unicode(self, obj, pack=struct.pack):
507 if self.bin:
508 encoding = obj.encode('utf-8')
509 n = len(encoding)
510 self.write(BINUNICODE + pack("<i", n) + encoding)
511 else:
512 obj = obj.replace("\\", "\\u005c")
513 obj = obj.replace("\n", "\\u000a")
514 self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
515 self.memoize(obj)
516 dispatch[UnicodeType] = save_unicode
517
518 if StringType == UnicodeType:
519 # This is true for Jython
520 def save_string(self, obj, pack=struct.pack):
521 unicode = obj.isunicode()
522
523 if self.bin:
524 if unicode:
525 obj = obj.encode("utf-8")
526 l = len(obj)
527 if l < 256 and not unicode:
528 self.write(SHORT_BINSTRING + chr(l) + obj)
529 else:
530 s = pack("<i", l)
531 if unicode:
532 self.write(BINUNICODE + s + obj)
533 else:
534 self.write(BINSTRING + s + obj)
535 else:
536 if unicode:
537 obj = obj.replace("\\", "\\u005c")
538 obj = obj.replace("\n", "\\u000a")
539 obj = obj.encode('raw-unicode-escape')
540 self.write(UNICODE + obj + '\n')
541 else:
542 self.write(STRING + repr(obj) + '\n')
543 self.memoize(obj)
544 dispatch[StringType] = save_string
545
546 def save_tuple(self, obj):
547 write = self.write
548 proto = self.proto
549
550 n = len(obj)
551 if n == 0:
552 if proto:
553 write(EMPTY_TUPLE)
554 else:
555 write(MARK + TUPLE)
556 return
557
558 save = self.save
559 memo = self.memo
560 if n <= 3 and proto >= 2:
561 for element in obj:
562 save(element)
563 # Subtle. Same as in the big comment below.
564 if id(obj) in memo:
565 get = self.get(memo[id(obj)][0])
566 write(POP * n + get)
567 else:
568 write(_tuplesize2code[n])
569 self.memoize(obj)
570 return
571
572 # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
573 # has more than 3 elements.
574 write(MARK)
575 for element in obj:
576 save(element)
577
578 if id(obj) in memo:
579 # Subtle. obj was not in memo when we entered save_tuple(), so
580 # the process of saving the tuple's elements must have saved
581 # the tuple itself: the tuple is recursive. The proper action
582 # now is to throw away everything we put on the stack, and
583 # simply GET the tuple (it's already constructed). This check
584 # could have been done in the "for element" loop instead, but
585 # recursive tuples are a rare thing.
586 get = self.get(memo[id(obj)][0])
587 if proto:
588 write(POP_MARK + get)
589 else: # proto 0 -- POP_MARK not available
590 write(POP * (n+1) + get)
591 return
592
593 # No recursion.
594 self.write(TUPLE)
595 self.memoize(obj)
596
597 dispatch[TupleType] = save_tuple
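
    # Illustrative sketch: the memo check above is what lets self-referential
    # structures round-trip correctly.
    #
    #     >>> t = ([],)
    #     >>> t[0].append(t)            # t is now reachable from inside itself
    #     >>> u = loads(dumps(t))
    #     >>> u[0][0] is u
    #     True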
598
599 # save_empty_tuple() isn't used by anything in Python 2.3. However, I
600 # found a Pickler subclass in Zope3 that calls it, so it's not harmless
601 # to remove it.
602 def save_empty_tuple(self, obj):
603 self.write(EMPTY_TUPLE)
604
605 def save_list(self, obj):
606 write = self.write
607
608 if self.bin:
609 write(EMPTY_LIST)
610 else: # proto 0 -- can't use EMPTY_LIST
611 write(MARK + LIST)
612
613 self.memoize(obj)
614 self._batch_appends(iter(obj))
615
616 dispatch[ListType] = save_list
617
618 # Keep in synch with cPickle's BATCHSIZE. Nothing will break if it gets
619 # out of synch, though.
620 _BATCHSIZE = 1000
621
622 def _batch_appends(self, items):
623 # Helper to batch up APPENDS sequences
624 save = self.save
625 write = self.write
626
627 if not self.bin:
628 for x in items:
629 save(x)
630 write(APPEND)
631 return
632
633 r = xrange(self._BATCHSIZE)
634 while items is not None:
635 tmp = []
636 for i in r:
637 try:
638 x = items.next()
639 tmp.append(x)
640 except StopIteration:
641 items = None
642 break
643 n = len(tmp)
644 if n > 1:
645 write(MARK)
646 for x in tmp:
647 save(x)
648 write(APPENDS)
649 elif n:
650 save(tmp[0])
651 write(APPEND)
652 # else tmp is empty, and we're done
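
    # Illustrative sketch: with a binary protocol, small batches come out as a
    # single MARK ... APPENDS run.  A two-item list under protocol 1, for
    # instance, is EMPTY_LIST, BINPUT, MARK, two BININT1s, APPENDS, STOP:
    #
    #     >>> dumps([1, 2], 1)
    #     ']q\x00(K\x01K\x02e.'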
653
654 def save_dict(self, obj):
655 write = self.write
656
657 if self.bin:
658 write(EMPTY_DICT)
659 else: # proto 0 -- can't use EMPTY_DICT
660 write(MARK + DICT)
661
662 self.memoize(obj)
663 self._batch_setitems(obj.iteritems())
664
665 dispatch[DictionaryType] = save_dict
666 if PyStringMap is not None:
667 dispatch[PyStringMap] = save_dict
668
669 def _batch_setitems(self, items):
670 # Helper to batch up SETITEMS sequences; proto >= 1 only
671 save = self.save
672 write = self.write
673
674 if not self.bin:
675 for k, v in items:
676 save(k)
677 save(v)
678 write(SETITEM)
679 return
680
681 r = xrange(self._BATCHSIZE)
682 while items is not None:
683 tmp = []
684 for i in r:
685 try:
686 tmp.append(items.next())
687 except StopIteration:
688 items = None
689 break
690 n = len(tmp)
691 if n > 1:
692 write(MARK)
693 for k, v in tmp:
694 save(k)
695 save(v)
696 write(SETITEMS)
697 elif n:
698 k, v = tmp[0]
699 save(k)
700 save(v)
701 write(SETITEM)
702 # else tmp is empty, and we're done
703
704 def save_inst(self, obj):
705 cls = obj.__class__
706
707 memo = self.memo
708 write = self.write
709 save = self.save
710
711 if hasattr(obj, '__getinitargs__'):
712 args = obj.__getinitargs__()
713 len(args) # XXX Assert it's a sequence
714 _keep_alive(args, memo)
715 else:
716 args = ()
717
718 write(MARK)
719
720 if self.bin:
721 save(cls)
722 for arg in args:
723 save(arg)
724 write(OBJ)
725 else:
726 for arg in args:
727 save(arg)
728 write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
729
730 self.memoize(obj)
731
732 try:
733 getstate = obj.__getstate__
734 except AttributeError:
735 stuff = obj.__dict__
736 else:
737 stuff = getstate()
738 _keep_alive(stuff, memo)
739 save(stuff)
740 write(BUILD)
741
742 dispatch[InstanceType] = save_inst
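
    # Hypothetical sketch: an old-style class can control how save_inst()
    # records it: __getinitargs__ supplies the arguments replayed through
    # __init__ on loading, and __getstate__ can drop unpicklable attributes.
    # Connection and open_connection below are assumed names.
    #
    #     class Connection:
    #         def __init__(self, host, port):
    #             self.host, self.port = host, port
    #             self.sock = open_connection(host, port)   # not picklable
    #         def __getinitargs__(self):
    #             return (self.host, self.port)
    #         def __getstate__(self):
    #             state = self.__dict__.copy()
    #             del state['sock']
    #             return state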
743
744 def save_global(self, obj, name=None, pack=struct.pack):
745 write = self.write
746 memo = self.memo
747
748 if name is None:
749 name = obj.__name__
750
751 module = getattr(obj, "__module__", None)
752 if module is None:
753 module = whichmodule(obj, name)
754
755 try:
756 __import__(module)
757 mod = sys.modules[module]
758 klass = getattr(mod, name)
759 except (ImportError, KeyError, AttributeError):
760 raise PicklingError(
761 "Can't pickle %r: it's not found as %s.%s" %
762 (obj, module, name))
763 else:
764 if klass is not obj:
765 raise PicklingError(
766 "Can't pickle %r: it's not the same object as %s.%s" %
767 (obj, module, name))
768
769 if self.proto >= 2:
770 code = _extension_registry.get((module, name))
771 if code:
772 assert code > 0
773 if code <= 0xff:
774 write(EXT1 + chr(code))
775 elif code <= 0xffff:
776 write("%c%c%c" % (EXT2, code&0xff, code>>8))
777 else:
778 write(EXT4 + pack("<i", code))
779 return
780
781 write(GLOBAL + module + '\n' + name + '\n')
782 self.memoize(obj)
783
784 dispatch[ClassType] = save_global
785 dispatch[FunctionType] = save_global
786 dispatch[BuiltinFunctionType] = save_global
787 dispatch[TypeType] = save_global
788
789# Pickling helpers
790
791def _keep_alive(x, memo):
792 """Keeps a reference to the object x in the memo.
793
794 Because we remember objects by their id, we have
795 to assure that possibly temporary objects are kept
796 alive by referencing them.
797 We store a reference at the id of the memo, which should
798 normally not be used unless someone tries to deepcopy
799 the memo itself...
800 """
801 try:
802 memo[id(memo)].append(x)
803 except KeyError:
804 # aha, this is the first one :-)
805 memo[id(memo)]=[x]
806
807
808# A cache for whichmodule(), mapping a function object to the name of
809# the module in which the function was found.
810
811classmap = {} # called classmap for backwards compatibility
812
813def whichmodule(func, funcname):
814 """Figure out the module in which a function occurs.
815
816 Search sys.modules for the module.
817 Cache in classmap.
818 Return a module name.
819 If the function cannot be found, return "__main__".
820 """
821 # Python functions should always get an __module__ from their globals.
822 mod = getattr(func, "__module__", None)
823 if mod is not None:
824 return mod
825 if func in classmap:
826 return classmap[func]
827
828 for name, module in sys.modules.items():
829 if module is None:
830 continue # skip dummy package entries
831 if name != '__main__' and getattr(module, funcname, None) is func:
832 break
833 else:
834 name = '__main__'
835 classmap[func] = name
836 return name
837
838
839# Unpickling machinery
840
841class Unpickler:
842
843 def __init__(self, file):
844 """This takes a file-like object for reading a pickle data stream.
845
846 The protocol version of the pickle is detected automatically, so no
847 proto argument is needed.
848
849 The file-like object must have two methods, a read() method that
850 takes an integer argument, and a readline() method that requires no
851 arguments. Both methods should return a string. Thus the file-like
852 object can be a file object opened for reading, a StringIO object,
853 or any other custom object that meets this interface.
854 """
855 self.readline = file.readline
856 self.read = file.read
857 self.memo = {}
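
    # Illustrative sketch: any object with read() and readline() will do, so a
    # pickle held in a string can be loaded through StringIO.
    #
    #     >>> from cStringIO import StringIO
    #     >>> Unpickler(StringIO('I42\n.')).load()
    #     42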
858
859 def load(self):
860 """Read a pickled object representation from the open file.
861
862 Return the reconstituted object hierarchy specified in the file.
863 """
864 self.mark = object() # any new unique object
865 self.stack = []
866 self.append = self.stack.append
867 read = self.read
868 dispatch = self.dispatch
869 try:
870 while 1:
871 key = read(1)
872 dispatch[key](self)
873 except _Stop, stopinst:
874 return stopinst.value
875
876 # Return largest index k such that self.stack[k] is self.mark.
877 # If the stack doesn't contain a mark, eventually raises IndexError.
878 # This could be sped up by maintaining another stack of indices at which
879 # the mark appears. For that matter, the latter stack would suffice,
880 # and we wouldn't need to push mark objects on self.stack at all.
881 # Doing so is probably a good thing, though, since if the pickle is
882 # corrupt (or hostile) we may get a clue from finding self.mark embedded
883 # in unpickled objects.
884 def marker(self):
885 stack = self.stack
886 mark = self.mark
887 k = len(stack)-1
888 while stack[k] is not mark: k = k-1
889 return k
890
891 dispatch = {}
892
893 def load_eof(self):
894 raise EOFError
895 dispatch[''] = load_eof
896
897 def load_proto(self):
898 proto = ord(self.read(1))
899 if not 0 <= proto <= 2:
900 raise ValueError, "unsupported pickle protocol: %d" % proto
901 dispatch[PROTO] = load_proto
902
903 def load_persid(self):
904 pid = self.readline()[:-1]
905 self.append(self.persistent_load(pid))
906 dispatch[PERSID] = load_persid
907
908 def load_binpersid(self):
909 pid = self.stack.pop()
910 self.append(self.persistent_load(pid))
911 dispatch[BINPERSID] = load_binpersid
912
913 def load_none(self):
914 self.append(None)
915 dispatch[NONE] = load_none
916
917 def load_false(self):
918 self.append(False)
919 dispatch[NEWFALSE] = load_false
920
921 def load_true(self):
922 self.append(True)
923 dispatch[NEWTRUE] = load_true
924
925 def load_int(self):
926 data = self.readline()
927 if data == FALSE[1:]:
928 val = False
929 elif data == TRUE[1:]:
930 val = True
931 else:
932 try:
933 val = int(data)
934 except ValueError:
935 val = long(data)
936 self.append(val)
937 dispatch[INT] = load_int
938
939 def load_binint(self):
940 self.append(mloads('i' + self.read(4)))
941 dispatch[BININT] = load_binint
942
943 def load_binint1(self):
944 self.append(ord(self.read(1)))
945 dispatch[BININT1] = load_binint1
946
947 def load_binint2(self):
948 self.append(mloads('i' + self.read(2) + '\000\000'))
949 dispatch[BININT2] = load_binint2
950
951 def load_long(self):
952 self.append(long(self.readline()[:-1], 0))
953 dispatch[LONG] = load_long
954
955 def load_long1(self):
956 n = ord(self.read(1))
957 bytes = self.read(n)
958 self.append(decode_long(bytes))
959 dispatch[LONG1] = load_long1
960
961 def load_long4(self):
962 n = mloads('i' + self.read(4))
963 bytes = self.read(n)
964 self.append(decode_long(bytes))
965 dispatch[LONG4] = load_long4
966
967 def load_float(self):
968 self.append(float(self.readline()[:-1]))
969 dispatch[FLOAT] = load_float
970
971 def load_binfloat(self, unpack=struct.unpack):
972 self.append(unpack('>d', self.read(8))[0])
973 dispatch[BINFLOAT] = load_binfloat
974
975 def load_string(self):
976 rep = self.readline()[:-1]
977 for q in "\"'": # double or single quote
978 if rep.startswith(q):
979 if not rep.endswith(q):
980 raise ValueError, "insecure string pickle"
981 rep = rep[len(q):-len(q)]
982 break
983 else:
984 raise ValueError, "insecure string pickle"
985 self.append(rep.decode("string-escape"))
986 dispatch[STRING] = load_string
987
988 def load_binstring(self):
989 len = mloads('i' + self.read(4))
990 self.append(self.read(len))
991 dispatch[BINSTRING] = load_binstring
992
993 def load_unicode(self):
994 self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
995 dispatch[UNICODE] = load_unicode
996
997 def load_binunicode(self):
998 len = mloads('i' + self.read(4))
999 self.append(unicode(self.read(len),'utf-8'))
1000 dispatch[BINUNICODE] = load_binunicode
1001
1002 def load_short_binstring(self):
1003 len = ord(self.read(1))
1004 self.append(self.read(len))
1005 dispatch[SHORT_BINSTRING] = load_short_binstring
1006
1007 def load_tuple(self):
1008 k = self.marker()
1009 self.stack[k:] = [tuple(self.stack[k+1:])]
1010 dispatch[TUPLE] = load_tuple
1011
1012 def load_empty_tuple(self):
1013 self.stack.append(())
1014 dispatch[EMPTY_TUPLE] = load_empty_tuple
1015
1016 def load_tuple1(self):
1017 self.stack[-1] = (self.stack[-1],)
1018 dispatch[TUPLE1] = load_tuple1
1019
1020 def load_tuple2(self):
1021 self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
1022 dispatch[TUPLE2] = load_tuple2
1023
1024 def load_tuple3(self):
1025 self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
1026 dispatch[TUPLE3] = load_tuple3
1027
1028 def load_empty_list(self):
1029 self.stack.append([])
1030 dispatch[EMPTY_LIST] = load_empty_list
1031
1032 def load_empty_dictionary(self):
1033 self.stack.append({})
1034 dispatch[EMPTY_DICT] = load_empty_dictionary
1035
1036 def load_list(self):
1037 k = self.marker()
1038 self.stack[k:] = [self.stack[k+1:]]
1039 dispatch[LIST] = load_list
1040
1041 def load_dict(self):
1042 k = self.marker()
1043 d = {}
1044 items = self.stack[k+1:]
1045 for i in range(0, len(items), 2):
1046 key = items[i]
1047 value = items[i+1]
1048 d[key] = value
1049 self.stack[k:] = [d]
1050 dispatch[DICT] = load_dict
1051
1052 # INST and OBJ differ only in how they get a class object. Doing the
1053 # rest in a common routine is not just sensible: when the two routines
1054 # were separate, they diverged and grew different bugs.
1055 # klass is the class to instantiate, and k points to the topmost mark
1056 # object, following which are the arguments for klass.__init__.
1057 def _instantiate(self, klass, k):
1058 args = tuple(self.stack[k+1:])
1059 del self.stack[k:]
1060 instantiated = 0
1061 if (not args and
1062 type(klass) is ClassType and
1063 not hasattr(klass, "__getinitargs__")):
1064 try:
1065 value = _EmptyClass()
1066 value.__class__ = klass
1067 instantiated = 1
1068 except RuntimeError:
1069 # In restricted execution, assignment to inst.__class__ is
1070 # prohibited
1071 pass
1072 if not instantiated:
1073 try:
1074 value = klass(*args)
1075 except TypeError, err:
1076 raise TypeError, "in constructor for %s: %s" % (
1077 klass.__name__, str(err)), sys.exc_info()[2]
1078 self.append(value)
1079
1080 def load_inst(self):
1081 module = self.readline()[:-1]
1082 name = self.readline()[:-1]
1083 klass = self.find_class(module, name)
1084 self._instantiate(klass, self.marker())
1085 dispatch[INST] = load_inst
1086
1087 def load_obj(self):
1088 # Stack is ... markobject classobject arg1 arg2 ...
1089 k = self.marker()
1090 klass = self.stack.pop(k+1)
1091 self._instantiate(klass, k)
1092 dispatch[OBJ] = load_obj
1093
1094 def load_newobj(self):
1095 args = self.stack.pop()
1096 cls = self.stack[-1]
1097 obj = cls.__new__(cls, *args)
1098 self.stack[-1] = obj
1099 dispatch[NEWOBJ] = load_newobj
1100
1101 def load_global(self):
1102 module = self.readline()[:-1]
1103 name = self.readline()[:-1]
1104 klass = self.find_class(module, name)
1105 self.append(klass)
1106 dispatch[GLOBAL] = load_global
1107
1108 def load_ext1(self):
1109 code = ord(self.read(1))
1110 self.get_extension(code)
1111 dispatch[EXT1] = load_ext1
1112
1113 def load_ext2(self):
1114 code = mloads('i' + self.read(2) + '\000\000')
1115 self.get_extension(code)
1116 dispatch[EXT2] = load_ext2
1117
1118 def load_ext4(self):
1119 code = mloads('i' + self.read(4))
1120 self.get_extension(code)
1121 dispatch[EXT4] = load_ext4
1122
1123 def get_extension(self, code):
1124 nil = []
1125 obj = _extension_cache.get(code, nil)
1126 if obj is not nil:
1127 self.append(obj)
1128 return
1129 key = _inverted_registry.get(code)
1130 if not key:
1131 raise ValueError("unregistered extension code %d" % code)
1132 obj = self.find_class(*key)
1133 _extension_cache[code] = obj
1134 self.append(obj)
1135
1136 def find_class(self, module, name):
1137 # Subclasses may override this
1138 __import__(module)
1139 mod = sys.modules[module]
1140 klass = getattr(mod, name)
1141 return klass
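
    # Hypothetical sketch: find_class() is the single point where globals are
    # resolved, so a subclass can restrict what an untrusted pickle may load.
    # RestrictedUnpickler and its whitelist below are assumed, not part of
    # this module.
    #
    #     class RestrictedUnpickler(Unpickler):
    #         allowed = [('__builtin__', 'set'), ('__builtin__', 'frozenset')]
    #         def find_class(self, module, name):
    #             if (module, name) not in self.allowed:
    #                 raise UnpicklingError("forbidden global %s.%s"
    #                                       % (module, name))
    #             return Unpickler.find_class(self, module, name)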
1142
1143 def load_reduce(self):
1144 stack = self.stack
1145 args = stack.pop()
1146 func = stack[-1]
1147 if args is None:
1148 # A hack for Jim Fulton's ExtensionClass, now deprecated
1149 warnings.warn("__basicnew__ special case is deprecated",
1150 DeprecationWarning)
1151 value = func.__basicnew__()
1152 else:
1153 value = func(*args)
1154 stack[-1] = value
1155 dispatch[REDUCE] = load_reduce
1156
1157 def load_pop(self):
1158 del self.stack[-1]
1159 dispatch[POP] = load_pop
1160
1161 def load_pop_mark(self):
1162 k = self.marker()
1163 del self.stack[k:]
1164 dispatch[POP_MARK] = load_pop_mark
1165
1166 def load_dup(self):
1167 self.append(self.stack[-1])
1168 dispatch[DUP] = load_dup
1169
1170 def load_get(self):
1171 self.append(self.memo[self.readline()[:-1]])
1172 dispatch[GET] = load_get
1173
1174 def load_binget(self):
1175 i = ord(self.read(1))
1176 self.append(self.memo[repr(i)])
1177 dispatch[BINGET] = load_binget
1178
1179 def load_long_binget(self):
1180 i = mloads('i' + self.read(4))
1181 self.append(self.memo[repr(i)])
1182 dispatch[LONG_BINGET] = load_long_binget
1183
1184 def load_put(self):
1185 self.memo[self.readline()[:-1]] = self.stack[-1]
1186 dispatch[PUT] = load_put
1187
1188 def load_binput(self):
1189 i = ord(self.read(1))
1190 self.memo[repr(i)] = self.stack[-1]
1191 dispatch[BINPUT] = load_binput
1192
1193 def load_long_binput(self):
1194 i = mloads('i' + self.read(4))
1195 self.memo[repr(i)] = self.stack[-1]
1196 dispatch[LONG_BINPUT] = load_long_binput
1197
1198 def load_append(self):
1199 stack = self.stack
1200 value = stack.pop()
1201 list = stack[-1]
1202 list.append(value)
1203 dispatch[APPEND] = load_append
1204
1205 def load_appends(self):
1206 stack = self.stack
1207 mark = self.marker()
1208 list = stack[mark - 1]
1209 list.extend(stack[mark + 1:])
1210 del stack[mark:]
1211 dispatch[APPENDS] = load_appends
1212
1213 def load_setitem(self):
1214 stack = self.stack
1215 value = stack.pop()
1216 key = stack.pop()
1217 dict = stack[-1]
1218 dict[key] = value
1219 dispatch[SETITEM] = load_setitem
1220
1221 def load_setitems(self):
1222 stack = self.stack
1223 mark = self.marker()
1224 dict = stack[mark - 1]
1225 for i in range(mark + 1, len(stack), 2):
1226 dict[stack[i]] = stack[i + 1]
1227
1228 del stack[mark:]
1229 dispatch[SETITEMS] = load_setitems
1230
1231 def load_build(self):
1232 stack = self.stack
1233 state = stack.pop()
1234 inst = stack[-1]
1235 setstate = getattr(inst, "__setstate__", None)
1236 if setstate:
1237 setstate(state)
1238 return
1239 slotstate = None
1240 if isinstance(state, tuple) and len(state) == 2:
1241 state, slotstate = state
1242 if state:
1243 try:
1244 inst.__dict__.update(state)
1245 except RuntimeError:
1246 # XXX In restricted execution, the instance's __dict__
1247 # is not accessible. Use the old way of unpickling
1248 # the instance variables. This is a semantic
1249 # difference when unpickling in restricted
1250 # vs. unrestricted modes.
1251 # Note, however, that cPickle has never tried to do the
1252 # .update() business, and always uses
1253 # PyObject_SetItem(inst.__dict__, key, value) in a
1254 # loop over state.items().
1255 for k, v in state.items():
1256 setattr(inst, k, v)
1257 if slotstate:
1258 for k, v in slotstate.items():
1259 setattr(inst, k, v)
1260 dispatch[BUILD] = load_build
1261
1262 def load_mark(self):
1263 self.append(self.mark)
1264 dispatch[MARK] = load_mark
1265
1266 def load_stop(self):
1267 value = self.stack.pop()
1268 raise _Stop(value)
1269 dispatch[STOP] = load_stop
1270
1271# Helper class for load_inst/load_obj
1272
1273class _EmptyClass:
1274 pass
1275
1276# Encode/decode longs in linear time.
1277
1278import binascii as _binascii
1279
1280def encode_long(x):
1281 r"""Encode a long to a two's complement little-endian binary string.
1282 Note that 0L is a special case, returning an empty string, to save a
1283 byte in the LONG1 pickling context.
1284
1285 >>> encode_long(0L)
1286 ''
1287 >>> encode_long(255L)
1288 '\xff\x00'
1289 >>> encode_long(32767L)
1290 '\xff\x7f'
1291 >>> encode_long(-256L)
1292 '\x00\xff'
1293 >>> encode_long(-32768L)
1294 '\x00\x80'
1295 >>> encode_long(-128L)
1296 '\x80'
1297 >>> encode_long(127L)
1298 '\x7f'
1299 >>>
1300 """
1301
1302 if x == 0:
1303 return ''
1304 if x > 0:
1305 ashex = hex(x)
1306 assert ashex.startswith("0x")
1307 njunkchars = 2 + ashex.endswith('L')
1308 nibbles = len(ashex) - njunkchars
1309 if nibbles & 1:
1310 # need an even # of nibbles for unhexlify
1311 ashex = "0x0" + ashex[2:]
1312 elif int(ashex[2], 16) >= 8:
1313 # "looks negative", so need a byte of sign bits
1314 ashex = "0x00" + ashex[2:]
1315 else:
1316 # Build the 256's-complement: (1L << nbytes) + x. The trick is
1317 # to find the number of bytes in linear time (although that should
1318 # really be a constant-time task).
1319 ashex = hex(-x)
1320 assert ashex.startswith("0x")
1321 njunkchars = 2 + ashex.endswith('L')
1322 nibbles = len(ashex) - njunkchars
1323 if nibbles & 1:
1324 # Extend to a full byte.
1325 nibbles += 1
1326 nbits = nibbles * 4
1327 x += 1L << nbits
1328 assert x > 0
1329 ashex = hex(x)
1330 njunkchars = 2 + ashex.endswith('L')
1331 newnibbles = len(ashex) - njunkchars
1332 if newnibbles < nibbles:
1333 ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
1334 if int(ashex[2], 16) < 8:
1335 # "looks positive", so need a byte of sign bits
1336 ashex = "0xff" + ashex[2:]
1337
1338 if ashex.endswith('L'):
1339 ashex = ashex[2:-1]
1340 else:
1341 ashex = ashex[2:]
1342 assert len(ashex) & 1 == 0, (x, ashex)
1343 binary = _binascii.unhexlify(ashex)
1344 return binary[::-1]
1345
1346def decode_long(data):
1347 r"""Decode a long from a two's complement little-endian binary string.
1348
1349 >>> decode_long('')
1350 0L
1351 >>> decode_long("\xff\x00")
1352 255L
1353 >>> decode_long("\xff\x7f")
1354 32767L
1355 >>> decode_long("\x00\xff")
1356 -256L
1357 >>> decode_long("\x00\x80")
1358 -32768L
1359 >>> decode_long("\x80")
1360 -128L
1361 >>> decode_long("\x7f")
1362 127L
1363 """
1364
1365 nbytes = len(data)
1366 if nbytes == 0:
1367 return 0L
1368 ashex = _binascii.hexlify(data[::-1])
1369 n = long(ashex, 16) # quadratic time before Python 2.3; linear now
1370 if data[-1] >= '\x80':
1371 n -= 1L << (nbytes * 8)
1372 return n
1373
1374# Shorthands
1375
1376try:
1377 from cStringIO import StringIO
1378except ImportError:
1379 from StringIO import StringIO
1380
1381def dump(obj, file, protocol=None, bin=None):
1382 Pickler(file, protocol, bin).dump(obj)
1383
1384def dumps(obj, protocol=None, bin=None):
1385 file = StringIO()
1386 Pickler(file, protocol, bin).dump(obj)
1387 return file.getvalue()
1388
1389def load(file):
1390 return Unpickler(file).load()
1391
1392def loads(str):
1393 file = StringIO(str)
1394 return Unpickler(file).load()
1395
1396# Doctest
1397
1398def _test():
1399 import doctest
1400 return doctest.testmod()
1401
1402if __name__ == "__main__":
1403 _test()