-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Expand file tree
/
Copy pathflow.py
More file actions
executable file
·1935 lines (1699 loc) · 66.9 KB
/
flow.py
File metadata and controls
executable file
·1935 lines (1699 loc) · 66.9 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import sys
import os.path
import traceback
from typing import Optional
from semmle.python import ast
from semmle import util
from semmle.python.passes.ast_pass import iter_fields
from semmle.python.passes._pass import Pass
from semmle.python.passes import pruner
from semmle.python.passes import splitter
from semmle.python.passes import unroller
from semmle.python import modules
import semmle.graph as graph
from semmle.logging import Logger
__all__ = [ 'FlowPass' ]
class ConsistencyError(util.SemmleError):
    '''Raised when the flow pass reaches an AST node type it does not expect.'''
    pass
def error(node, _):
    '''Walker entry for AST node types that must never be walked directly.'''
    raise ConsistencyError("Unexpected node type " + type(node).__name__)
class FlowNode(object):
    '''One node of the control-flow graph, wrapping a single AST node.
    Several FlowNodes may wrap the same AST node (see copy()).'''

    __slots__ = [ 'node' ]

    def __init__(self, node):
        self.node = node

    def __repr__(self):
        if not hasattr(self.node, "lineno"):
            return 'FlowNode(%r)' % self.node
        return 'FlowNode(%s at %d)' % (type(self.node), self.node.lineno)

    def copy(self):
        '''Return a distinct FlowNode wrapping the same AST node.'''
        return FlowNode(self.node)
#Kinds of node sets. These are bit flags taken from semmle.util, so kinds
#can be OR-ed together and tested with masks.
NORMAL = util.NORMAL_EDGE
TRUE = util.TRUE_EDGE
FALSE = util.FALSE_EDGE
EXCEPTION = util.EXCEPTIONAL_EDGE
EXHAUSTED = util.EXHAUSTED_EDGE
#Mask matching either branch kind.
TRUE_OR_FALSE = TRUE | FALSE
#Set of names of modules that are guaranteed to be in the interpreter regardless of platform
GUARANTEED_MODULES = {
    "_ast",
    "_bisect",
    "_codecs",
    "_collections",
    "_functools",
    "_heapq",
    "_io",
    "_locale",
    "_md5",
    "_operator",
    "_random",
    "_sha256",
    "_sha512",
    "_socket",
    "_sre",
    "_struct",
    "_symtable",
    "_warnings",
    "_weakref",
    "array",
    "binascii",
    "cmath",
    "errno",
    "gc",
    "itertools",
    "marshal",
    "math",
    "sys",
    "syslog",
    "time",
    "unicodedata",
    "zipimport",
    "zlib",
}
#Builtin names (types, exceptions, functions and dunder globals) present in
#a Python 3 interpreter.
_py3_names = {
    "ArithmeticError",
    "AssertionError",
    "AttributeError",
    "BaseException",
    "BlockingIOError",
    "BrokenPipeError",
    "BufferError",
    "BytesWarning",
    "ChildProcessError",
    "ConnectionAbortedError",
    "ConnectionError",
    "ConnectionRefusedError",
    "ConnectionResetError",
    "DeprecationWarning",
    "EOFError",
    "Ellipsis",
    "EnvironmentError",
    "Exception",
    "False",
    "FileExistsError",
    "FileNotFoundError",
    "FloatingPointError",
    "FutureWarning",
    "GeneratorExit",
    "IOError",
    "ImportError",
    "ImportWarning",
    "IndentationError",
    "IndexError",
    "InterruptedError",
    "IsADirectoryError",
    "KeyError",
    "KeyboardInterrupt",
    "LookupError",
    "MemoryError",
    "NameError",
    "None",
    "NotADirectoryError",
    "NotImplemented",
    "NotImplementedError",
    "OSError",
    "OverflowError",
    "PendingDeprecationWarning",
    "PermissionError",
    "ProcessLookupError",
    "ReferenceError",
    "ResourceWarning",
    "RuntimeError",
    "RuntimeWarning",
    "StopIteration",
    "SyntaxError",
    "SyntaxWarning",
    "SystemError",
    "SystemExit",
    "TabError",
    "TimeoutError",
    "True",
    "TypeError",
    "UnboundLocalError",
    "UnicodeDecodeError",
    "UnicodeEncodeError",
    "UnicodeError",
    "UnicodeTranslateError",
    "UnicodeWarning",
    "UserWarning",
    "ValueError",
    "Warning",
    "ZeroDivisionError",
    "__build_class__",
    "__debug__",
    "__doc__",
    "__import__",
    "__loader__",
    "__name__",
    "__package__",
    "__spec__",
    "abs",
    "all",
    "any",
    "ascii",
    "bin",
    "bool",
    "bytearray",
    "bytes",
    # "callable", only 3.2+
    "chr",
    "classmethod",
    "compile",
    "complex",
    "copyright",
    "credits",
    "delattr",
    "dict",
    "dir",
    "divmod",
    "enumerate",
    "eval",
    "exec",
    "exit",
    "filter",
    "float",
    "format",
    "frozenset",
    "getattr",
    "globals",
    "hasattr",
    "hash",
    "help",
    "hex",
    "id",
    "input",
    "int",
    "isinstance",
    "issubclass",
    "iter",
    "len",
    "license",
    "list",
    "locals",
    "map",
    "max",
    "memoryview",
    "min",
    "next",
    "object",
    "oct",
    "open",
    "ord",
    "pow",
    "print",
    "property",
    "quit",
    "range",
    "repr",
    "reversed",
    "round",
    "set",
    "setattr",
    "slice",
    "sorted",
    "staticmethod",
    "str",
    "sum",
    "super",
    "tuple",
    "type",
    "vars",
    "zip",
}
#Builtin names present in a Python 2 interpreter.
_py2_names = {
    "ArithmeticError",
    "AssertionError",
    "AttributeError",
    "BaseException",
    "BufferError",
    "BytesWarning",
    "DeprecationWarning",
    "EOFError",
    "Ellipsis",
    "EnvironmentError",
    "Exception",
    "False",
    "FloatingPointError",
    "FutureWarning",
    "GeneratorExit",
    "IOError",
    "ImportError",
    "ImportWarning",
    "IndentationError",
    "IndexError",
    "KeyError",
    "KeyboardInterrupt",
    "LookupError",
    "MemoryError",
    "NameError",
    "None",
    "NotImplemented",
    "NotImplementedError",
    "OSError",
    "OverflowError",
    "PendingDeprecationWarning",
    "ReferenceError",
    "RuntimeError",
    "RuntimeWarning",
    "StandardError",
    "StopIteration",
    "SyntaxError",
    "SyntaxWarning",
    "SystemError",
    "SystemExit",
    "TabError",
    "True",
    "TypeError",
    "UnboundLocalError",
    "UnicodeDecodeError",
    "UnicodeEncodeError",
    "UnicodeError",
    "UnicodeTranslateError",
    "UnicodeWarning",
    "UserWarning",
    "ValueError",
    "Warning",
    "ZeroDivisionError",
    "__debug__",
    "__doc__",
    "__import__",
    "__name__",
    "__package__",
    "abs",
    "all",
    "any",
    "apply",
    "basestring",
    "bin",
    "bool",
    "buffer",
    "bytearray",
    "bytes",
    "callable",
    "chr",
    "classmethod",
    "cmp",
    "coerce",
    "compile",
    "complex",
    "copyright",
    "credits",
    "delattr",
    "dict",
    "dir",
    "divmod",
    "enumerate",
    "eval",
    "execfile",
    "exit",
    "file",
    "filter",
    "float",
    "format",
    "frozenset",
    "getattr",
    "globals",
    "hasattr",
    "hash",
    "help",
    "hex",
    "id",
    "input",
    "int",
    "intern",
    "isinstance",
    "issubclass",
    "iter",
    "len",
    "license",
    "list",
    "locals",
    "long",
    "map",
    "max",
    "memoryview",
    "min",
    "next",
    "object",
    "oct",
    "open",
    "ord",
    "pow",
    "print",
    "property",
    "quit",
    "range",
    "raw_input",
    "reduce",
    "reload",
    "repr",
    "reversed",
    "round",
    "set",
    "setattr",
    "slice",
    "sorted",
    "staticmethod",
    "str",
    "sum",
    "super",
    "tuple",
    "type",
    "unichr",
    "unicode",
    "vars",
    "xrange",
    "zip",
}
#Set of names that always exist (for both Python 2 and 3)
BUILTIN_NAME_ALWAYS_EXISTS = _py2_names.intersection(_py3_names)
# A NodeSet is a conceptually a set of (FlowNode, kind) pairs.
#This class exists to document the interface.
class ExampleNodeSet(object):
    '''This class exists for documentation purposes only.

    The concrete implementations are EmptyNodeSet, SingletonNodeSet and
    MultiNodeSet, specialised by cardinality for efficiency.'''

    def branch(self):
        '''Branch into (true, false) pair of nodesets.'''

    def __add__(self, other):
        '''Add this node set to another, returning the union'''

    def normalise(self):
        '''Return normalise form of this node set, turning all kinds into NORMAL'''

    def exception(self):
        '''Return exception form of this node set, turning all kinds into EXCEPTION'''

    def merge_true_false_pairs(self):
        '''Return copy of this node set with all pairs of TRUE and FALSE kinds for the same node turned into NORMAL'''

    def add_node(self, node, kind):
        '''Return a new node set with (node, kind) pair added.'''

    def invert(self):
        '''Return copy of this node set with all TRUE kinds set to FALSE and vice versa.'''
class EmptyNodeSet(object):
    '''The node set with no members. Stateless, so the single shared
    instance EMPTY (below) is used throughout.'''

    def branch(self):
        '''Branching an empty set yields two empty sets.'''
        return self, self

    def __add__(self, other):
        #Union with the empty set is just the other operand.
        return other

    def normalise(self):
        return self

    def exception(self):
        return self

    def merge_true_false_pairs(self):
        return self

    def add_node(self, node, kind):
        #Growing the empty set produces a singleton.
        return SingletonNodeSet(node, kind)

    def __iter__(self):
        yield from ()

    def __len__(self):
        return 0

    def __str__(self):
        return "{}"

    def invert(self):
        return self

#The canonical (shared) empty node set.
EMPTY = EmptyNodeSet()
class SingletonNodeSet(object):
    '''A node set containing exactly one (node, kind) pair.'''

    __slots__ = [ 'node', 'kind']

    def __init__(self, node, kind):
        self.node = node
        self.kind = kind

    def branch(self):
        '''Branch into a (true, false) pair of node sets.'''
        if self.kind == NORMAL:
            #A normal edge contributes to both branches, with the matching kind.
            return SingletonNodeSet(self.node, TRUE), SingletonNodeSet(self.node, FALSE)
        if self.kind == TRUE:
            return self, EMPTY
        if self.kind == FALSE:
            return EMPTY, self
        #Other kinds (e.g. exceptional) reach both branches unchanged.
        return self, self

    def __add__(self, other):
        '''Union with another node set.'''
        return self if other is EMPTY else other.add_node(self.node, self.kind)

    def normalise(self):
        '''The same node, with kind forced to NORMAL.'''
        return SingletonNodeSet(self.node, NORMAL)

    def exception(self):
        '''The same node, with kind forced to EXCEPTION.'''
        return SingletonNodeSet(self.node, EXCEPTION)

    def merge_true_false_pairs(self):
        #A singleton cannot contain a TRUE/FALSE pair.
        return self

    def add_node(self, node, kind):
        '''Return a node set extended with the (node, kind) pair.'''
        if (node, kind) == (self.node, self.kind):
            return self
        combined = MultiNodeSet()
        combined.append((self.node, self.kind))
        combined.append((node, kind))
        return combined

    def __iter__(self):
        yield self.node, self.kind

    def __len__(self):
        return 1

    def invert(self):
        '''Swap TRUE and FALSE kinds; other kinds are unchanged.'''
        if self.kind & TRUE_OR_FALSE:
            return SingletonNodeSet(self.node, self.kind ^ TRUE_OR_FALSE)
        return self

    def unique_node(self):
        return self.node

    def __str__(self):
        return "{(%s, %d)}" % (self.node, self.kind)
class MultiNodeSet(list):
    '''A node set holding two or more (node, kind) pairs, stored as a list
    of tuples. Set semantics are maintained by insert_node, which refuses
    duplicate pairs.'''

    __slots__ = []

    def branch(self):
        '''Branch into (true, false) pair of nodesets.'''
        l = EMPTY
        for node, kind in self:
            if kind != FALSE:
                l = l.add_node(node, kind)
        r = EMPTY
        for node, kind in self:
            if kind != TRUE:
                r = r.add_node(node, kind)
        return l, r

    def __add__(self, other):
        '''Return the union of this node set and `other`.'''
        if other is EMPTY:
            return self
        res = MultiNodeSet(self)
        if isinstance(other, SingletonNodeSet):
            res.insert_node(other.node, other.kind)
            return res
        for node, kind in other:
            res.insert_node(node, kind)
        return res

    def convert(self, the_kind):
        '''Return a copy with every kind replaced by `the_kind`, collapsing
        to a singleton when all pairs share one node.'''
        the_node = self[0][0]
        for node, kind in self:
            if node != the_node:
                break
        else:
            return SingletonNodeSet(node, the_kind)
        res = MultiNodeSet()
        for node, kind in self:
            res.insert_node(node, the_kind)
        return res

    def normalise(self):
        return self.convert(NORMAL)

    def exception(self):
        return self.convert(EXCEPTION)

    def merge_true_false_pairs(self):
        '''Return copy of this node set with all pairs of TRUE and FALSE
        kinds for the same node turned into NORMAL.'''
        #Common case len() == 2
        if len(self) == 2:
            #Fixed: the original compared self[0][1] with itself
            #(`self[0][1] | self[0][1]`), so this fast path never merged.
            if (self[0][1] | self[1][1]) == TRUE_OR_FALSE and self[0][0] == self[1][0]:
                return SingletonNodeSet(self[0][0], NORMAL)
            else:
                return self
        #Either no true, or no false edges: nothing can merge.
        all_kinds = 0
        for node, kind in self:
            all_kinds |= kind
        if (all_kinds & TRUE_OR_FALSE) != TRUE_OR_FALSE:
            return self
        #General, slow and hopefully rare case.
        nodes = {}
        for node, kind in self:
            if node in nodes:
                nodes[node] |= kind
            else:
                nodes[node] = kind
        res = MultiNodeSet()
        for node, kind in nodes.items():
            if (kind & TRUE_OR_FALSE) == TRUE_OR_FALSE:
                #Both branch kinds present: replace the pair with NORMAL,
                #preserving any EXCEPTION bit.
                kind = (kind | NORMAL) & (NORMAL | EXCEPTION)
            for K in (NORMAL, TRUE, FALSE, EXCEPTION):
                if kind & K:
                    res.insert_node(node, K)
        return res

    def add_node(self, *t):
        '''Return a copy of this set with the (node, kind) pair added.'''
        res = MultiNodeSet(self)
        res.insert_node(*t)
        return res

    def insert_node(self, *t):
        '''Append the (node, kind) pair in place, unless already present.'''
        if t not in self:
            self.append(t)

    def __str__(self):
        #Fixed: the original joined the raw tuples (`",".join(self)`),
        #which raised TypeError. Format matches SingletonNodeSet.__str__.
        return "{" + ",".join("(%s, %d)" % item for item in self) + "}"

    def invert(self):
        '''Swap TRUE and FALSE kinds; other kinds are unchanged.'''
        res = MultiNodeSet()
        for node, kind in self:
            if kind & TRUE_OR_FALSE:
                res.insert_node(node, kind ^ TRUE_OR_FALSE)
            else:
                res.insert_node(node, kind)
        return res
class BlockStack(list):
    '''A stack of blocks (loops or tries). Each entry is the node set of
    flow nodes that exit via that block.'''

    def push_block(self):
        '''Open a new, initially empty, block.'''
        self.append(EMPTY)

    def pop_block(self):
        '''Close the innermost block and return its accumulated node set.'''
        return self.pop()

    def add(self, node_set):
        '''Accumulate `node_set` into the innermost block.'''
        self[-1] = self[-1] + node_set
class FlowScope(object):
    '''Per-scope state while building the flow graph of one Python scope:
    the graph under construction plus the stacks that route
    break/continue/return and exception edges.'''

    def __init__(self, depth, ast_scope):
        # depth is the nesting depth of this scope; ast_scope the scope's AST node.
        self.entry = FlowNode(ast_scope)
        self.graph = graph.FlowGraph(self.entry)
        #Synthetic node reached when an unhandled exception escapes the scope.
        self.exceptional_exit = FlowNode(ast_scope)
        self.graph.add_node(self.exceptional_exit)
        self.graph.annotate_node(self.exceptional_exit, EXCEPTION_EXIT)
        self.depth = depth
        #Nodes whose exceptions have not yet been captured by a handler.
        self.exception_stack = BlockStack()
        self.exception_stack.push_block()
        #Exits produced by break/continue; a block is pushed per loop.
        self.breaking_stack = BlockStack()
        self.continuing_stack = BlockStack()
        #Exits produced by return statements.
        self.return_stack = BlockStack()
        self.return_stack.push_block()
        self.ast_scope = ast_scope

    def inner(self, ast_scope):
        '''Create a FlowScope for a scope nested directly inside this one.'''
        return FlowScope(self.depth+1, ast_scope)

    def pop_exceptions(self):
        '''Pop and return the node set of unhandled exception raisers.'''
        return self.exception_stack.pop_block()

    def split(self):
        splitter.do_split(self.ast_scope, self.graph)

    def prune(self):
        #Remove the always false condition edges.
        pruner.do_pruning(self.ast_scope, self.graph)

    def unroll(self):
        unroller.do_unrolling(self.ast_scope, self.graph)

    def write_graph(self, writer):
        '''Emit the finished flow graph, immediate dominators and SSA form
        through `writer`.'''
        self.graph.delete_unreachable_nodes()
        #Emit flow graph
        self._write_flow_nodes(writer)
        for pred, succ, kind in self.graph.edges():
            write_successors(writer, pred, succ, kind)
            #Non-normal (and non-exhausted) edges are also recorded as plain
            #successor edges.
            if kind != NORMAL and kind != EXHAUSTED:
                write_successors(writer, pred, succ, NORMAL)
        #Emit idoms
        for node, idom in self.graph.idoms():
            write_idoms(writer, node, idom)
        #Emit SSA variables
        for var in self.graph.ssa_variables():
            write_ssa_var(writer, var)
        for node, var in self.graph.ssa_definitions():
            write_ssa_defn(writer, var, node)
        for node, var in self.graph.ssa_uses():
            write_ssa_use(writer, node, var)
        for var, arg in self.graph.ssa_phis():
            write_ssa_phi(writer, var, arg)

    def _write_flow_nodes(self, writer):
        #Emit each flow node, plus its entry/exit annotation if it has one.
        blocks = self.graph.get_basic_blocks()
        for flow, note in self.graph.nodes():
            if note is not None:
                write_scope_node(writer, flow, self.ast_scope, note)
            if flow in blocks:
                head, index = blocks[flow]
                write_flow_node(writer, flow, head, index)
#Codes for scope entry/exit nodes.
#These are hardcoded in QL. Do not change them.
FALL_THROUGH_EXIT = 0    #Implicit exit at the end of the scope body.
EXCEPTION_EXIT = 1       #Exit via an unhandled exception.
RETURN_EXIT = 2          #Exit via an explicit return statement.
ENTRY = -1               #The scope's entry node.
class FlowPass(Pass):
'''Extracts flow-control information. Currently generates a flow control
graph. There is a many-to-one relation between flow-nodes and ast nodes.
This enables precise flow control for 'try' statements.
Each flow node also has a number. If there are several flow nodes for
one ast node, they will all have different numbers.
For flow nodes representing a scope (class, function or module) then
the numbers are as follows: entry=-1, exceptional exit=1,
fallthrough exit=0, explicit return=2
'''
name = "flow"
def __init__(self, split, prune=True, unroll=False, logger:Optional[Logger] = None):
    '''Initialize all the tree walkers.

    split -- whether to run the graph splitter on each scope.
    prune -- whether to remove always-false condition edges (default True).
    unroll -- whether to unroll loops (default False).
    logger -- warning sink; a fresh Logger is created when None.
    '''
    #Dispatch table mapping AST node type to its walker method. Types not
    #listed here are filled in by the loop below: expression types default
    #to _walk_expr, operator/context leaf types to skip.
    self._walkers = {
        list : self._walk_list,
        bool : self.skip,
        int : self.skip,
        float : self.skip,
        bytes : self.skip,
        str : self.skip,
        complex : self.skip,
        type(None) : self.skip,
        ast.Lambda : self._walk_scope_defn,
        ast.ClassExpr : self._walk_class_expr,
        ast.FunctionExpr : self._walk_scope_defn,
        ast.For : self._walk_for_loop,
        ast.Pass : self._walk_stmt_only,
        ast.Global : self._walk_stmt_only,
        ast.Break : self._walk_break,
        ast.BinOp : self._walk_binop,
        ast.Compare : self._walk_compare,
        ast.Continue : self._walk_continue,
        ast.Raise : self._walk_raise,
        ast.Return : self._walk_return,
        ast.Delete : self._walk_delete,
        ast.While : self._walk_while,
        ast.If : self._walk_if_stmt,
        ast.IfExp : self._walk_if_expr,
        ast.Slice : self._walk_slice,
        #ExceptStmt and comprehension nodes are walked by their parents, so
        #reaching one directly is a consistency error.
        ast.ExceptStmt : error,
        ast.comprehension : error,
        ast.ListComp: self._walk_generator,
        ast.SetComp: self._walk_generator,
        ast.DictComp: self._walk_generator,
        ast.Dict : self._walk_dict,
        ast.keyword : self._walk_expr_no_raise,
        ast.KeyValuePair : self._walk_keyword,
        ast.DictUnpacking : self._walk_yield,
        ast.Starred : self._walk_yield,
        ast.arguments : self._walk_arguments,
        ast.Name : self._walk_name,
        ast.PlaceHolder : self._walk_name,
        ast.Num : self._walk_atom,
        ast.Str : self._walk_atom,
        ast.Try : self._walk_try,
        ast.List : self._walk_sequence,
        ast.Tuple : self._walk_sequence,
        #NOTE: the original literal bound ast.UnaryOp twice (first to
        #_walk_expr_no_raise, then to _walk_unary_op); only the second
        #binding ever took effect, so the dead entry has been removed.
        ast.UnaryOp : self._walk_unary_op,
        ast.Assign : self._walk_assign,
        ast.ImportExpr : self._walk_import_expr,
        ast.ImportMember : self._walk_expr,
        ast.Ellipsis : self._walk_atom,
        ast.Print : self._walk_post_stmt,
        ast.alias : self._walk_alias,
        ast.GeneratorExp: self._walk_generator,
        ast.Assert: self._walk_assert,
        ast.AssignExpr: self._walk_assignexpr,
        ast.AugAssign : self._walk_augassign,
        ast.Attribute : self._walk_attribute,
        ast.Subscript : self._walk_subscript,
        ast.BoolOp : self._walk_bool_expr,
        ast.TemplateWrite : self._walk_post_stmt,
        ast.Filter : self._walk_expr_no_raise,
        ast.Yield : self._walk_yield,
        ast.YieldFrom : self._walk_yield,
        ast.Expr : self._walk_skip_stmt,
        ast.Import : self._walk_skip_stmt,
        ast.ImportFrom : self._walk_post_stmt,
        ast.With: self._walk_with,
        ast.Match: self._walk_match,
        ast.Case: self._walk_case,
        ast.Repr : self._walk_expr_no_raise,
        ast.Nonlocal : self._walk_stmt_only,
        ast.Exec : self._walk_exec,
        ast.AnnAssign : self._walk_ann_assign,
        ast.TypeAlias : self._walk_stmt_only,
        ast.TypeVar: self.skip,
        ast.TypeVarTuple: self.skip,
        ast.ParamSpec: self.skip,
        ast.SpecialOperation: self._walk_expr_no_raise,
        #Abstract base classes: walking one directly is a consistency error.
        #(The original literal also bound ast.expr_context to skip earlier
        #on; that entry was shadowed by the one below and has been removed.
        #Concrete expr_context subclasses are mapped to skip by the loop
        #that follows.)
        ast.Module : error,
        ast.expr : error,
        ast.stmt : error,
        ast.cmpop : error,
        ast.boolop : error,
        ast.operator : error,
        ast.expr_context : error,
        ast.unaryop : error,
        ast.AstBase : error,
    }
    for t in ast.__dict__.values():
        if isinstance(t, type) and ast.AstBase in t.__mro__:
            #Setup walkers
            expr_walker = self._walk_expr
            if t.__mro__[1] is ast.expr:
                if t not in self._walkers:
                    self._walkers[t] = expr_walker
            elif t.__mro__[1] in (ast.cmpop, ast.boolop, ast.operator,
                                  ast.expr_context, ast.unaryop):
                self._walkers[t] = self.skip
    self._walkers[ast.TemplateDottedNotation] = self._walkers[ast.Attribute]
    # Initialize walkers for patterns,
    # These return both a tree and a list of nodes:
    # - the tree represents the computation needed to evaluate whether the pattern matches,
    # - the list of nodes represents the bindings resulting from a successful match.
    self._pattern_walkers = {
        ast.MatchAsPattern: self._walk_as_pattern,
        ast.MatchOrPattern: self._walk_or_pattern,
        ast.MatchLiteralPattern: self._walk_literal_pattern,
        ast.MatchCapturePattern: self._walk_capture_pattern,
        ast.MatchWildcardPattern: self._walk_wildcard_pattern,
        ast.MatchValuePattern: self._walk_value_pattern,
        ast.MatchSequencePattern: self._walk_sequence_pattern,
        ast.MatchStarPattern: self._walk_star_pattern,
        ast.MatchMappingPattern: self._walk_mapping_pattern,
        ast.MatchDoubleStarPattern: self._walk_double_star_pattern,
        ast.MatchKeyValuePattern: self._walk_key_value_pattern,
        ast.MatchClassPattern: self._walk_class_pattern,
        ast.MatchKeywordPattern: self._walk_keyword_pattern,
    }
    #Current FlowScope; None outside extract().
    self.scope = None
    self.in_try = 0
    self.in_try_name = 0
    self.split = split
    self.prune = prune
    self.unroll = unroll
    self.logger = logger or Logger()
    self.filename = "<unknown>"
#Entry point to the tree walker
def extract(self, ast, writer):
    '''Build and emit flow graphs for the module `ast` via `writer`.
    A None ast is silently ignored.'''
    if ast is not None:
        self.writer = writer
        self._walk_scope(ast)
def set_filename(self, filename):
    '''Record the name of the file being extracted (used in warnings).'''
    self.filename = filename
#Walkers
def _walk_arguments(self, node, predecessors):
    '''Thread control flow through every child of an arguments node, in
    field order.'''
    for _, _, child in iter_fields(node):
        predecessors = self._walk(child, predecessors)
    return predecessors
def _walk_generator(self, node, predecessors):
    '''Walk a comprehension expression: the iterable is evaluated in the
    enclosing scope, then the comprehension's implicit function scope is
    walked separately.'''
    succ = self._walk(node.iterable, predecessors)
    succ = self.add_successor(succ, node)
    #If the implicit function can raise, the exception surfaces here.
    if self._walk_scope(node.function):
        self._raise_exception(succ)
    return succ
def _walk_comprehension(self, node, predecessors):
    '''A comprehension is driven entirely by its generator list.'''
    return self._walk_generators(node, node.generators, predecessors)
def _walk_generators(self, node, generators, predecessors):
    '''Build the flow for a (possibly nested) chain of comprehension
    generators. `node` is the comprehension node; `generators` the
    remaining `for` clauses. Recurses once per generator.'''
    if not generators:
        #Innermost level: evaluate the element expression(s).
        if isinstance(node, ast.DictComp):
            #NOTE(review): the value is given a flow node before the key
            #here — confirm this ordering is intended.
            predecessors = self.add_successor(predecessors, node.value)
            predecessors = self.add_successor(predecessors, node.key)
        else:
            predecessors = self.add_successor(predecessors, node.elt)
        return predecessors
    else:
        gen = generators[0]
        predecessors = self._walk(gen.iter, predecessors)
        predecessors = self.add_successor(predecessors, gen)
        #The generator's flow node acts as the loop header for the back edge below.
        loop_node = predecessors.unique_node()
        predecessors = self._walk(gen.target, predecessors)
        skip = EMPTY
        for test in gen.ifs:
            #Each `if` filter branches: false edges skip to the loop header.
            predecessors = self._walk(test, predecessors)
            true_nodes, false_nodes = predecessors.branch()
            #NOTE(review): this unions the true edges into the existing set
            #rather than replacing it (`predecessors = true_nodes`) — verify
            #this is the intended behaviour.
            predecessors += true_nodes
            skip += false_nodes
        predecessors = self._walk_generators(node, generators[1:], predecessors)
        predecessors += skip
        #Back edge to the loop header.
        self.add_successor_node(predecessors, loop_node)
        return predecessors
def _walk_if_expr(self, node, predecessors):
    '''`body if test else orelse`: branch on the test, walk each arm on
    its branch, then rejoin at the IfExp node itself.'''
    true_set, false_set = self._walk(node.test, predecessors).branch()
    merged = self._walk(node.body, true_set) + self._walk(node.orelse, false_set)
    return self.add_successor(merged, node)
def _walk_dict(self, node, predecessors):
    '''Evaluate each key/value item in order, then the Dict node itself.'''
    succ = predecessors
    for item in node.items:
        succ = self._walk(item, succ)
    return self.add_successor(succ, node)
def _walk_alias(self, node, predecessors):
    '''An import alias evaluates its value, then the name it binds.'''
    after_value = self._walk(node.value, predecessors)
    return self._walk(node.asname, after_value)
def _walk_slice(self, node, predecessors):
    '''Evaluate start, stop and step (in that order), then the Slice node.'''
    for part in (node.start, node.stop, node.step):
        predecessors = self._walk(part, predecessors)
    return self.add_successor(predecessors, node)
def _walk_break(self, node, predecessors):
    '''A break statement is an exit of the enclosing loop; no statement
    follows it.'''
    exits = self.add_successor(predecessors, node)
    # In well formed code the breaking stack is never empty here, but the
    # parser accepts `break` outside a loop, so guard against that.
    if self.scope.breaking_stack:
        self.scope.breaking_stack.add(exits)
    #No fall-through to the following statement.
    return EMPTY
def _walk_continue(self, node, predecessors):
    '''A continue statement jumps back to the loop header; no statement
    follows it.'''
    exits = self.add_successor(predecessors, node)
    # In well formed code the continuing stack is never empty here, but the
    # parser accepts `continue` outside a loop, so guard against that.
    if self.scope.continuing_stack:
        self.scope.continuing_stack.add(exits)
    #No fall-through to the following statement.
    return EMPTY
def _raise_exception(self, predecessors):
    '''Record that `predecessors` may raise: mark them with the EXCEPTION
    kind and hand them to the innermost exception-handler block.'''
    self.scope.exception_stack.add(predecessors.exception())
def _walk_raise(self, node, predecessors):
    '''`raise`: evaluate the operands, then transfer control to the
    exception handlers. Nothing follows a raise.'''
    for _, _, child in iter_fields(node):
        predecessors = self._walk(child, predecessors)
    self._raise_exception(self.add_successor(predecessors, node))
    return EMPTY
def _walk_return(self, node, predecessors):
    '''`return`: evaluate the value (if any), then jump to the scope's
    return exit. Nothing follows a return.'''
    for _, _, child in iter_fields(node):
        predecessors = self._walk(child, predecessors)
    self.scope.return_stack.add(self.add_successor(predecessors, node))
    return EMPTY
def _walk_delete(self, node, predecessors):
    '''The CFG for `del a, b` is `a -> del -> b -> del`: each target is
    used (walked) before the Delete node removes it, which also gives
    `del x, x` the correct semantics.'''
    for target in node.targets:
        predecessors = self._walk(target, predecessors)
        predecessors = self.add_successor(predecessors, node)
    return predecessors
def _walk_stmt_only(self, node, predecessors):
    '''A statement with no sub-expressions: just one flow node.'''
    return self.add_successor(predecessors, node)
def _walk_scope(self, scope_node):
'''Returns: whether this scope raises an exception (or not)'''
prev_flow_scope = self.scope
if prev_flow_scope is None:
self.scope = FlowScope(0, scope_node)
else:
self.scope = prev_flow_scope.inner(scope_node)
predecessors = SingletonNodeSet(self.scope.entry, NORMAL)
for _, _, child_node in iter_fields(scope_node):
predecessors = self._walk(child_node, predecessors)
implicit_exit = self.add_successor(predecessors, scope_node).unique_node()
self.scope.graph.annotate_node(implicit_exit, FALL_THROUGH_EXIT)
if isinstance(scope_node, (ast.Module, ast.Class)):
self.scope.graph.use_all_defined_variables(implicit_exit)
#Mark all nodes that raise unhandled exceptions.
exceptions = self.scope.pop_exceptions()
for node, kind in exceptions:
if kind == NORMAL or kind == EXCEPTION:
self.scope.graph.annotate_node(node, EXCEPTION_EXIT)
else:
self.scope.graph.add_edge(node, self.scope.exceptional_exit)
self.scope.graph.annotate_edge(node, self.scope.exceptional_exit, kind)
self.scope.graph.annotate_node(self.scope.entry, ENTRY)
if not isinstance(scope_node, ast.Module):
returns = self.scope.return_stack.pop_block()
return_exit = self.add_successor(returns, scope_node).unique_node()
self.scope.graph.annotate_node(return_exit, RETURN_EXIT)
if self.split:
try:
self.scope.split()
# we found a regression in the split logic, where in some scenarios a split head would not be in the subgraph.
# Instead of aborting extracting the whole file, we can continue and just not split the graph.
# see semmlecode-python-tests/extractor-tests/splitter-regression/failure.py
except AssertionError:
self.logger.warning("Failed to split in " + self.filename + ", continuing anyway")
if self.prune:
self.scope.prune()
if self.unroll:
self.scope.unroll()